hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | 
qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a7e9ac32c4b56ba1a8e25f300b0d5b5ee9f6bdce | 7,340 | py | Python | test/TestNeighbours.py | GeorgiyDemo/cv_algorithms | 7b70704b37c88cc6f1e253e98aae0d1f766ff9a1 | [
"Apache-2.0"
] | 9 | 2018-01-03T10:42:55.000Z | 2022-03-09T19:05:48.000Z | test/TestNeighbours.py | GeorgiyDemo/cv_algorithms | 7b70704b37c88cc6f1e253e98aae0d1f766ff9a1 | [
"Apache-2.0"
] | 4 | 2018-10-25T20:55:14.000Z | 2022-01-23T20:48:11.000Z | test/TestNeighbours.py | GeorgiyDemo/cv_algorithms | 7b70704b37c88cc6f1e253e98aae0d1f766ff9a1 | [
"Apache-2.0"
] | 15 | 2018-06-26T02:29:20.000Z | 2022-01-19T02:35:27.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import io
from numpy.testing import assert_approx_equal, assert_allclose, assert_array_equal
from nose.tools import assert_equal, assert_true, assert_false, assert_greater, assert_less
import cv2
import cv_algorithms
from cv_algorithms import Neighbours, Direction
import numpy as np
class TestNeighbours(object):
def test_binary_neighbours_simple(self):
"""Test binary direction detection"""
img = np.zeros((10,8), dtype=np.uint8)
y, x = 5, 4
img[5,4] = 255
# Currently just test whether it crashes
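# binary_neighbours() returns, for each pixel, a bitmask whose set bits
# mark the directions in which that pixel has a white neighbour
# (inferred from the assertions below).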
directions = cv_algorithms.binary_neighbours(img)
print(directions)
assert_equal(0, directions[0,0])
# NOTE: Directions are inverted; this is not a mistake.
# From the pixel to the NW of the white pixel,
# the white pixel lies to the SE!
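# The raw bitmask values asserted below give the bit layout:
# SE=bit 7, S=bit 6, SW=bit 5, E=bit 4, W=bit 3, NE=bit 2, N=bit 1, NW=bit 0.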
# Center
assert_equal(0, directions[5, 4])
# NW of the white pixel
assert_equal((y-1, x-1), Neighbours.northwest_coords(y, x))
assert_equal((y-1, x-1), Neighbours.coords(Direction.NorthWest, y, x))
assert_equal((1 << 7), directions[y-1, x-1])
assert_false(Neighbours.is_northwest(directions[y-1, x-1]))
assert_false(Neighbours.is_north(directions[y-1, x-1]))
assert_false(Neighbours.is_northeast(directions[y-1, x-1]))
assert_false(Neighbours.is_west(directions[y-1, x-1]))
assert_false(Neighbours.is_east(directions[y-1, x-1]))
assert_false(Neighbours.is_southwest(directions[y-1, x-1]))
assert_false(Neighbours.is_south(directions[y-1, x-1]))
assert_true(Neighbours.is_southeast(directions[y-1, x-1]))
# N of the white pixel
assert_equal((y-1, x), Neighbours.north_coords(y, x))
assert_equal((y-1, x), Neighbours.coords(Direction.North, y, x))
assert_equal((1 << 6), directions[y-1, x])
assert_false(Neighbours.is_northwest(directions[y-1, x]))
assert_false(Neighbours.is_north(directions[y-1, x]))
assert_false(Neighbours.is_northeast(directions[y-1, x]))
assert_false(Neighbours.is_west(directions[y-1, x]))
assert_false(Neighbours.is_east(directions[y-1, x]))
assert_false(Neighbours.is_southwest(directions[y-1, x]))
assert_true(Neighbours.is_south(directions[y-1, x]))
assert_false(Neighbours.is_southeast(directions[y-1, x]))
# NE of the white pixel
assert_equal((y-1, x+1), Neighbours.northeast_coords(y, x))
assert_equal((y-1, x+1), Neighbours.coords(Direction.NorthEast, y, x))
assert_equal((1 << 5), directions[y-1, x+1])
assert_false(Neighbours.is_northwest(directions[y-1, x+1]))
assert_false(Neighbours.is_north(directions[y-1, x+1]))
assert_false(Neighbours.is_northeast(directions[y-1, x+1]))
assert_false(Neighbours.is_west(directions[y-1, x+1]))
assert_false(Neighbours.is_east(directions[y-1, x+1]))
assert_true(Neighbours.is_southwest(directions[y-1, x+1]))
assert_false(Neighbours.is_south(directions[y-1, x+1]))
assert_false(Neighbours.is_southeast(directions[y-1, x+1]))
# W of the white pixel
assert_equal((y, x-1), Neighbours.west_coords(y, x))
assert_equal((y, x-1), Neighbours.coords(Direction.West, y, x))
assert_equal((1 << 4), directions[y, x-1])
assert_false(Neighbours.is_northwest(directions[y, x-1]))
assert_false(Neighbours.is_north(directions[y, x-1]))
assert_false(Neighbours.is_northeast(directions[y, x-1]))
assert_false(Neighbours.is_west(directions[y, x-1]))
assert_true(Neighbours.is_east(directions[y, x-1]))
assert_false(Neighbours.is_southwest(directions[y, x-1]))
assert_false(Neighbours.is_south(directions[y, x-1]))
assert_false(Neighbours.is_southeast(directions[y, x-1]))
# E of the white pixel
assert_equal((y, x+1), Neighbours.east_coords(y, x))
assert_equal((y, x+1), Neighbours.coords(Direction.East, y, x))
assert_equal((1 << 3), directions[y, x+1])
assert_false(Neighbours.is_northwest(directions[y, x+1]))
assert_false(Neighbours.is_north(directions[y, x+1]))
assert_false(Neighbours.is_northeast(directions[y, x+1]))
assert_true(Neighbours.is_west(directions[y, x+1]))
assert_false(Neighbours.is_east(directions[y, x+1]))
assert_false(Neighbours.is_southwest(directions[y, x+1]))
assert_false(Neighbours.is_south(directions[y, x+1]))
assert_false(Neighbours.is_southeast(directions[y, x+1]))
# SW of the white pixel
assert_equal((y+1, x-1), Neighbours.southwest_coords(y, x))
assert_equal((y+1, x-1), Neighbours.coords(Direction.SouthWest, y, x))
assert_equal((1 << 2), directions[y+1, x-1])
assert_false(Neighbours.is_northwest(directions[y+1, x-1]))
assert_false(Neighbours.is_north(directions[y+1, x-1]))
assert_true(Neighbours.is_northeast(directions[y+1, x-1]))
assert_false(Neighbours.is_west(directions[y+1, x-1]))
assert_false(Neighbours.is_east(directions[y+1, x-1]))
assert_false(Neighbours.is_southwest(directions[y+1, x-1]))
assert_false(Neighbours.is_south(directions[y+1, x-1]))
assert_false(Neighbours.is_southeast(directions[y+1, x-1]))
# S of the white pixel
assert_equal((y+1, x), Neighbours.south_coords(y, x))
assert_equal((y+1, x), Neighbours.coords(Direction.South, y, x))
assert_equal((1 << 1), directions[y+1, x])
assert_false(Neighbours.is_northwest(directions[y+1, x]))
assert_true(Neighbours.is_north(directions[y+1, x]))
assert_false(Neighbours.is_northeast(directions[y+1, x]))
assert_false(Neighbours.is_west(directions[y+1, x]))
assert_false(Neighbours.is_east(directions[y+1, x]))
assert_false(Neighbours.is_southwest(directions[y+1, x]))
assert_false(Neighbours.is_south(directions[y+1, x]))
assert_false(Neighbours.is_southeast(directions[y+1, x]))
# SE of the white pixel
assert_equal((y+1, x+1), Neighbours.southeast_coords(y, x))
assert_equal((y+1, x+1), Neighbours.coords(Direction.SouthEast, y, x))
assert_equal((1 << 0), directions[y+1, x+1])
assert_true(Neighbours.is_northwest(directions[y+1, x+1]))
assert_false(Neighbours.is_north(directions[y+1, x+1]))
assert_false(Neighbours.is_northeast(directions[y+1, x+1]))
assert_false(Neighbours.is_west(directions[y+1, x+1]))
assert_false(Neighbours.is_east(directions[y+1, x+1]))
assert_false(Neighbours.is_southwest(directions[y+1, x+1]))
assert_false(Neighbours.is_south(directions[y+1, x+1]))
assert_false(Neighbours.is_southeast(directions[y+1, x+1]))
def test_binary_neighbours_corner(self):
# Just test if it crashes for something in the corners
img = np.zeros((10,8), dtype=np.uint8)
img[9,7] = 255
img[0,0] = 255
cv_algorithms.binary_neighbours(img)
def test_direction_str(self):
assert_equal("↑", str(Direction.North))
assert_equal(Direction.North, Direction.from_unicode("↑"))
assert_equal([Direction.SouthEast, Direction.North], Direction.from_unicode("↘↑")) | 55.18797 | 91 | 0.670163 | 1,088 | 7,340 | 4.349265 | 0.096507 | 0.167371 | 0.041843 | 0.272189 | 0.818893 | 0.763736 | 0.763736 | 0.763736 | 0.737532 | 0.715765 | 0 | 0.030501 | 0.187057 | 7,340 | 133 | 92 | 55.18797 | 0.761857 | 0.063352 | 0 | 0.017699 | 0 | 0 | 0.000584 | 0 | 0 | 0 | 0 | 0 | 0.840708 | 1 | 0.026549 | false | 0 | 0.061947 | 0 | 0.097345 | 0.00885 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
a7eb31303931dae53bdf49563afc00724517ec4d | 58,209 | py | Python | huaweicloud-sdk-gaussdbfornosql/huaweicloudsdkgaussdbfornosql/v3/gaussdbfornosql_client.py | NQLoong/huaweicloud-sdk-python-v3 | 677944a0b722147c6e105c53df9110724d64152a | [
"Apache-2.0"
] | 1 | 2021-11-03T07:54:50.000Z | 2021-11-03T07:54:50.000Z | huaweicloud-sdk-gaussdbfornosql/huaweicloudsdkgaussdbfornosql/v3/gaussdbfornosql_client.py | mawenbo-huawei/huaweicloud-sdk-python-v3 | 677944a0b722147c6e105c53df9110724d64152a | [
"Apache-2.0"
] | null | null | null | huaweicloud-sdk-gaussdbfornosql/huaweicloudsdkgaussdbfornosql/v3/gaussdbfornosql_client.py | mawenbo-huawei/huaweicloud-sdk-python-v3 | 677944a0b722147c6e105c53df9110724d64152a | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
from __future__ import absolute_import
import datetime
import re
import importlib
import six
from huaweicloudsdkcore.client import Client, ClientBuilder
from huaweicloudsdkcore.exceptions import exceptions
from huaweicloudsdkcore.utils import http_utils
from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest
class GaussDBforNoSQLClient(Client):
"""
:param configuration: Configuration object for this client
:param pool_threads: The number of threads to use for async requests
to the API. More threads means more concurrent API requests.
"""
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
NATIVE_TYPES_MAPPING = {
'int': int,
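# The name 'long' exists only on Python 2; on Python 3 the conditional
# expression below selects int without ever evaluating the name 'long'.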
'long': int if six.PY3 else long,
'float': float,
'str': str,
'bool': bool,
'date': datetime.date,
'datetime': datetime.datetime,
'object': object,
}
def __init__(self):
super(GaussDBforNoSQLClient, self).__init__()
self.model_package = importlib.import_module("huaweicloudsdkgaussdbfornosql.v3.model")
self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'}
@classmethod
def new_builder(cls, clazz=None):
if clazz is None:
return ClientBuilder(cls)
if clazz.__name__ != "GaussDBforNoSQLClient":
raise TypeError("client type error, supported client type is GaussDBforNoSQLClient")
return ClientBuilder(clazz)
def apply_configuration(self, request):
"""应用参数模板
将参数模板应用到实例,可以指定一个或多个实例。
:param ApplyConfigurationRequest request
:return: ApplyConfigurationResponse
"""
return self.apply_configuration_with_http_info(request)
def apply_configuration_with_http_info(self, request):
"""应用参数模板
将参数模板应用到实例,可以指定一个或多个实例。
:param ApplyConfigurationRequest request
:return: ApplyConfigurationResponse
"""
all_params = ['config_id', 'apply_configuration_request_body']
local_var_params = {}
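# Collect the attributes that are present on the request object into a plain dict.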
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'config_id' in local_var_params:
path_params['config_id'] = local_var_params['config_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
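# Stream requests carry their payload as a file stream rather than a serialized body.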
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/configurations/{config_id}/apply',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ApplyConfigurationResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def batch_tag_action(self, request):
"""批量添加或删除资源标签
批量添加或删除指定数据库实例的标签。
:param BatchTagActionRequest request
:return: BatchTagActionResponse
"""
return self.batch_tag_action_with_http_info(request)
def batch_tag_action_with_http_info(self, request):
"""批量添加或删除资源标签
批量添加或删除指定数据库实例的标签。
:param BatchTagActionRequest request
:return: BatchTagActionResponse
"""
all_params = ['instance_id', 'batch_tag_action_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/tags/action',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='BatchTagActionResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_configuration(self, request):
"""创建参数模板
创建参数模板。
:param CreateConfigurationRequest request
:return: CreateConfigurationResponse
"""
return self.create_configuration_with_http_info(request)
def create_configuration_with_http_info(self, request):
"""创建参数模板
创建参数模板。
:param CreateConfigurationRequest request
:return: CreateConfigurationResponse
"""
all_params = ['create_configuration_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/configurations',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateConfigurationResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_instance(self, request):
"""创建实例
创建文档数据库实例,包括集群实例、副本集实例、以及单节点实例。
:param CreateInstanceRequest request
:return: CreateInstanceResponse
"""
return self.create_instance_with_http_info(request)
def create_instance_with_http_info(self, request):
"""创建实例
创建文档数据库实例,包括集群实例、副本集实例、以及单节点实例。
:param CreateInstanceRequest request
:return: CreateInstanceResponse
"""
all_params = ['create_instance_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateInstanceResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_configuration(self, request):
"""删除参数模板
删除指定参数模板。
:param DeleteConfigurationRequest request
:return: DeleteConfigurationResponse
"""
return self.delete_configuration_with_http_info(request)
def delete_configuration_with_http_info(self, request):
"""删除参数模板
删除指定参数模板。
:param DeleteConfigurationRequest request
:return: DeleteConfigurationResponse
"""
all_params = ['config_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'config_id' in local_var_params:
path_params['config_id'] = local_var_params['config_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/configurations/{config_id}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteConfigurationResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_instance(self, request):
"""删除实例
删除数据库实例。
:param DeleteInstanceRequest request
:return: DeleteInstanceResponse
"""
return self.delete_instance_with_http_info(request)
def delete_instance_with_http_info(self, request):
"""删除实例
删除数据库实例。
:param DeleteInstanceRequest request
:return: DeleteInstanceResponse
"""
all_params = ['instance_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteInstanceResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def expand_instance_node(self, request):
"""扩容指定集群实例的节点数量
扩容指定集群实例的节点数量。
:param ExpandInstanceNodeRequest request
:return: ExpandInstanceNodeResponse
"""
return self.expand_instance_node_with_http_info(request)
def expand_instance_node_with_http_info(self, request):
"""扩容指定集群实例的节点数量
扩容指定集群实例的节点数量。
:param ExpandInstanceNodeRequest request
:return: ExpandInstanceNodeResponse
"""
all_params = ['instance_id', 'enlarge_instance_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/enlarge-node',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ExpandInstanceNodeResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_configurations(self, request):
"""获取参数模板列表
获取参数模板列表,包括所有数据库的默认参数模板和用户创建的参数模板。
:param ListConfigurationsRequest request
:return: ListConfigurationsResponse
"""
return self.list_configurations_with_http_info(request)
def list_configurations_with_http_info(self, request):
"""获取参数模板列表
获取参数模板列表,包括所有数据库的默认参数模板和用户创建的参数模板。
:param ListConfigurationsRequest request
:return: ListConfigurationsResponse
"""
all_params = []
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/configurations',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListConfigurationsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_datastores(self, request):
"""查询指定实例类型的数据库版本信息
查询指定实例类型的数据库版本信息。
:param ListDatastoresRequest request
:return: ListDatastoresResponse
"""
return self.list_datastores_with_http_info(request)
def list_datastores_with_http_info(self, request):
"""查询指定实例类型的数据库版本信息
查询指定实例类型的数据库版本信息。
:param ListDatastoresRequest request
:return: ListDatastoresResponse
"""
all_params = ['datastore_name']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'datastore_name' in local_var_params:
path_params['datastore_name'] = local_var_params['datastore_name']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/datastores/{datastore_name}/versions',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListDatastoresResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_flavors(self, request):
"""查询指定条件下的所有实例规格信息
查询指定条件下的所有实例规格信息。
:param ListFlavorsRequest request
:return: ListFlavorsResponse
"""
return self.list_flavors_with_http_info(request)
def list_flavors_with_http_info(self, request):
"""查询指定条件下的所有实例规格信息
查询指定条件下的所有实例规格信息。
:param ListFlavorsRequest request
:return: ListFlavorsResponse
"""
all_params = ['region', 'engine_name']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
if 'region' in local_var_params:
query_params.append(('region', local_var_params['region']))
if 'engine_name' in local_var_params:
query_params.append(('engine_name', local_var_params['engine_name']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/flavors',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListFlavorsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_instance_tags(self, request):
"""查询资源标签
查询指定实例的标签信息。
:param ListInstanceTagsRequest request
:return: ListInstanceTagsResponse
"""
return self.list_instance_tags_with_http_info(request)
def list_instance_tags_with_http_info(self, request):
"""查询资源标签
查询指定实例的标签信息。
:param ListInstanceTagsRequest request
:return: ListInstanceTagsResponse
"""
all_params = ['instance_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/tags',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListInstanceTagsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_instances(self, request):
"""查询实例列表
根据指定条件查询数据库实例列表。
:param ListInstancesRequest request
:return: ListInstancesResponse
"""
return self.list_instances_with_http_info(request)
def list_instances_with_http_info(self, request):
"""查询实例列表
根据指定条件查询数据库实例列表。
:param ListInstancesRequest request
:return: ListInstancesResponse
"""
all_params = ['id', 'name', 'mode', 'datastore_type', 'vpc_id', 'subnet_id', 'offset', 'limit']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
if 'id' in local_var_params:
query_params.append(('id', local_var_params['id']))
if 'name' in local_var_params:
query_params.append(('name', local_var_params['name']))
if 'mode' in local_var_params:
query_params.append(('mode', local_var_params['mode']))
if 'datastore_type' in local_var_params:
query_params.append(('datastore_type', local_var_params['datastore_type']))
if 'vpc_id' in local_var_params:
query_params.append(('vpc_id', local_var_params['vpc_id']))
if 'subnet_id' in local_var_params:
query_params.append(('subnet_id', local_var_params['subnet_id']))
if 'offset' in local_var_params:
query_params.append(('offset', local_var_params['offset']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListInstancesResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_instances_by_tags(self, request):
"""查询资源实例
根据标签查询指定的数据库实例。
:param ListInstancesByTagsRequest request
:return: ListInstancesByTagsResponse
"""
return self.list_instances_by_tags_with_http_info(request)
def list_instances_by_tags_with_http_info(self, request):
"""查询资源实例
根据标签查询指定的数据库实例。
:param ListInstancesByTagsRequest request
:return: ListInstancesByTagsResponse
"""
all_params = ['list_instances_by_tags_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/resource_instances/action',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListInstancesByTagsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_slow_logs(self, request):
"""查询数据库慢日志
查询数据库慢日志信息。
:param ListSlowLogsRequest request
:return: ListSlowLogsResponse
"""
return self.list_slow_logs_with_http_info(request)
def list_slow_logs_with_http_info(self, request):
"""查询数据库慢日志
查询数据库慢日志信息。
:param ListSlowLogsRequest request
:return: ListSlowLogsResponse
"""
all_params = ['instance_id', 'start_date', 'end_date', 'node_id', 'type', 'offset', 'limit']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
if 'start_date' in local_var_params:
query_params.append(('start_date', local_var_params['start_date']))
if 'end_date' in local_var_params:
query_params.append(('end_date', local_var_params['end_date']))
if 'node_id' in local_var_params:
query_params.append(('node_id', local_var_params['node_id']))
if 'type' in local_var_params:
query_params.append(('type', local_var_params['type']))
if 'offset' in local_var_params:
query_params.append(('offset', local_var_params['offset']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/slowlog',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListSlowLogsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def reset_password(self, request):
"""修改实例的管理员密码
修改实例的管理员密码。
:param ResetPasswordRequest request
:return: ResetPasswordResponse
"""
return self.reset_password_with_http_info(request)
def reset_password_with_http_info(self, request):
"""修改实例的管理员密码
修改实例的管理员密码。
:param ResetPasswordRequest request
:return: ResetPasswordResponse
"""
all_params = ['instance_id', 'reset_password_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/password',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ResetPasswordResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def resize_instance(self, request):
"""变更实例规格
变更实例的规格。
:param ResizeInstanceRequest request
:return: ResizeInstanceResponse
"""
return self.resize_instance_with_http_info(request)
def resize_instance_with_http_info(self, request):
"""变更实例规格
变更实例的规格。
:param ResizeInstanceRequest request
:return: ResizeInstanceResponse
"""
all_params = ['instance_id', 'resize_instance_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/resize',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ResizeInstanceResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def resize_instance_volume(self, request):
"""扩容实例存储容量
扩容实例的存储容量大小。
:param ResizeInstanceVolumeRequest request
:return: ResizeInstanceVolumeResponse
"""
return self.resize_instance_volume_with_http_info(request)
def resize_instance_volume_with_http_info(self, request):
"""扩容实例存储容量
扩容实例的存储容量大小。
:param ResizeInstanceVolumeRequest request
:return: ResizeInstanceVolumeResponse
"""
all_params = ['instance_id', 'resize_instance_volume_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/extend-volume',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ResizeInstanceVolumeResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def set_backup_policy(self, request):
"""设置自动备份策略
设置自动备份策略。
:param SetBackupPolicyRequest request
:return: SetBackupPolicyResponse
"""
return self.set_backup_policy_with_http_info(request)
def set_backup_policy_with_http_info(self, request):
"""设置自动备份策略
设置自动备份策略。
:param SetBackupPolicyRequest request
:return: SetBackupPolicyResponse
"""
all_params = ['instance_id', 'set_backup_policy_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/backups/policy',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='SetBackupPolicyResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_backup_policy(self, request):
"""查询自动备份策略
查询自动备份策略。
:param ShowBackupPolicyRequest request
:return: ShowBackupPolicyResponse
"""
return self.show_backup_policy_with_http_info(request)
def show_backup_policy_with_http_info(self, request):
"""查询自动备份策略
查询自动备份策略。
:param ShowBackupPolicyRequest request
:return: ShowBackupPolicyResponse
"""
all_params = ['instance_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/backups/policy',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowBackupPolicyResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_configuration_detail(self, request):
"""获取指定参数模板的参数
获取指定参数模板的详细信息。
:param ShowConfigurationDetailRequest request
:return: ShowConfigurationDetailResponse
"""
return self.show_configuration_detail_with_http_info(request)
def show_configuration_detail_with_http_info(self, request):
"""获取指定参数模板的参数
获取指定参数模板的详细信息。
:param ShowConfigurationDetailRequest request
:return: ShowConfigurationDetailResponse
"""
all_params = ['config_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'config_id' in local_var_params:
path_params['config_id'] = local_var_params['config_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/configurations/{config_id}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowConfigurationDetailResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_instance_configuration(self, request):
"""获取指定实例的参数模板
获取指定实例的参数模板。
:param ShowInstanceConfigurationRequest request
:return: ShowInstanceConfigurationResponse
"""
return self.show_instance_configuration_with_http_info(request)
def show_instance_configuration_with_http_info(self, request):
"""获取指定实例的参数模板
获取指定实例的参数模板。
:param ShowInstanceConfigurationRequest request
:return: ShowInstanceConfigurationResponse
"""
all_params = ['instance_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/configurations',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowInstanceConfigurationResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_quotas(self, request):
"""查询配额
查询单租户在GaussDBforNoSQL服务下的资源配额。
:param ShowQuotasRequest request
:return: ShowQuotasResponse
"""
return self.show_quotas_with_http_info(request)
def show_quotas_with_http_info(self, request):
"""查询配额
查询单租户在GaussDBforNoSQL服务下的资源配额。
:param ShowQuotasRequest request
:return: ShowQuotasResponse
"""
all_params = []
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/quotas',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowQuotasResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def shrink_instance_node(self, request):
"""缩容指定集群实例的节点数量
缩容指定集群实例的节点数量。
:param ShrinkInstanceNodeRequest request
:return: ShrinkInstanceNodeResponse
"""
return self.shrink_instance_node_with_http_info(request)
def shrink_instance_node_with_http_info(self, request):
"""缩容指定集群实例的节点数量
缩容指定集群实例的节点数量。
:param ShrinkInstanceNodeRequest request
:return: ShrinkInstanceNodeResponse
"""
all_params = ['instance_id', 'enlarge_instance_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/reduce-node',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShrinkInstanceNodeResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_configuration(self, request):
"""修改参数模板参数
修改参数模板参数。
:param UpdateConfigurationRequest request
:return: UpdateConfigurationResponse
"""
return self.update_configuration_with_http_info(request)
def update_configuration_with_http_info(self, request):
"""修改参数模板参数
修改参数模板参数。
:param UpdateConfigurationRequest request
:return: UpdateConfigurationResponse
"""
all_params = ['config_id', 'update_configuration_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'config_id' in local_var_params:
path_params['config_id'] = local_var_params['config_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/configurations/{config_id}',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateConfigurationResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_instance_configuration(self, request):
"""修改指定实例的参数
修改指定实例的参数。
:param UpdateInstanceConfigurationRequest request
:return: UpdateInstanceConfigurationResponse
"""
return self.update_instance_configuration_with_http_info(request)
def update_instance_configuration_with_http_info(self, request):
"""修改指定实例的参数
修改指定实例的参数。
:param UpdateInstanceConfigurationRequest request
:return: UpdateInstanceConfigurationResponse
"""
all_params = ['instance_id', 'update_instance_configuration_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/configurations',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateInstanceConfigurationResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_instance_name(self, request):
"""修改实例名称
修改实例名称
:param UpdateInstanceNameRequest request
:return: UpdateInstanceNameResponse
"""
return self.update_instance_name_with_http_info(request)
def update_instance_name_with_http_info(self, request):
"""修改实例名称
修改实例名称
:param UpdateInstanceNameRequest request
:return: UpdateInstanceNameResponse
"""
all_params = ['instance_id', 'update_instance_name_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/name',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateInstanceNameResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_security_group(self, request):
"""变更实例安全组
变更实例关联的安全组
:param UpdateSecurityGroupRequest request
:return: UpdateSecurityGroupResponse
"""
return self.update_security_group_with_http_info(request)
def update_security_group_with_http_info(self, request):
"""变更实例安全组
变更实例关联的安全组
:param UpdateSecurityGroupRequest request
:return: UpdateSecurityGroupResponse
"""
all_params = ['instance_id', 'update_security_group_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v3/{project_id}/instances/{instance_id}/security-group',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateSecurityGroupResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_api_version(self, request):
"""查询当前支持的API版本信息列表
查询当前支持的API版本信息列表。
:param ListApiVersionRequest request
:return: ListApiVersionResponse
"""
return self.list_api_version_with_http_info(request)
def list_api_version_with_http_info(self, request):
"""查询当前支持的API版本信息列表
查询当前支持的API版本信息列表。
:param ListApiVersionRequest request
:return: ListApiVersionResponse
"""
all_params = []
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListApiVersionResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_api_version(self, request):
"""查询指定API版本信息
查询指定API版本信息。
:param ShowApiVersionRequest request
:return: ShowApiVersionResponse
"""
return self.show_api_version_with_http_info(request)
def show_api_version_with_http_info(self, request):
"""查询指定API版本信息
查询指定API版本信息。
:param ShowApiVersionRequest request
:return: ShowApiVersionResponse
"""
all_params = ['version']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'version' in local_var_params:
path_params['version'] = local_var_params['version']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/{version}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowApiVersionResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None,
post_params=None, response_type=None, response_headers=None, auth_settings=None,
collection_formats=None, request_type=None):
"""Makes the HTTP request and returns deserialized data.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be placed in the request header.
:param body: Request body.
:param post_params dict: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
        :param auth_settings list: Auth settings names for the request.
        :param response_type: Response data type.
        :param response_headers: Headers to be added to the response data.
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:param request_type: Request data type.
        :return: The response, returned directly.
"""
return self.do_http_request(
method=method,
resource_path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body,
post_params=post_params,
response_type=response_type,
response_headers=response_headers,
collection_formats=collection_formats,
request_type=request_type)
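    # For illustration, the wrappers above reduce to calls of this shape
    # (values mirror `show_api_version_with_http_info`, not a live request):
    #
    #     self.call_api(
    #         resource_path='/{version}', method='GET',
    #         path_params={'version': 'v3'},
    #         response_type='ShowApiVersionResponse',
    #         request_type='ShowApiVersionRequest')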
| 29.728805 | 113 | 0.623495 | 5,638 | 58,209 | 6.034055 | 0.053033 | 0.038095 | 0.066667 | 0.03786 | 0.878924 | 0.866461 | 0.833862 | 0.807349 | 0.796708 | 0.646855 | 0 | 0.000728 | 0.292532 | 58,209 | 1,957 | 114 | 29.743996 | 0.825381 | 0.118607 | 0 | 0.799639 | 0 | 0 | 0.101093 | 0.049903 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055054 | false | 0.005415 | 0.009025 | 0 | 0.121841 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c56332ec7e4757af4401f754fdf2b12b9fad86b6 | 70 | py | Python | python/baseline/dy/lm/__init__.py | domyounglee/baseline | 2261abfb7e770cc6f3d63a7f6e0015238d0e11f8 | [
"Apache-2.0"
] | null | null | null | python/baseline/dy/lm/__init__.py | domyounglee/baseline | 2261abfb7e770cc6f3d63a7f6e0015238d0e11f8 | [
"Apache-2.0"
] | null | null | null | python/baseline/dy/lm/__init__.py | domyounglee/baseline | 2261abfb7e770cc6f3d63a7f6e0015238d0e11f8 | [
"Apache-2.0"
] | 3 | 2019-05-27T04:52:21.000Z | 2022-02-15T00:22:53.000Z | from baseline.dy.lm.train import *
from baseline.dy.lm.model import *
| 23.333333 | 34 | 0.771429 | 12 | 70 | 4.5 | 0.583333 | 0.444444 | 0.518519 | 0.592593 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.114286 | 70 | 2 | 35 | 35 | 0.870968 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
3d7529abff41a56c453d32ff6b62e5af48e2a4c6 | 6,869 | py | Python | tests/util/conversion/test_datetime.py | danielschweigert/refactored-octo-succotash | 9c63f089ae6d06fc294594e754880bca267b26f9 | [
"MIT"
] | null | null | null | tests/util/conversion/test_datetime.py | danielschweigert/refactored-octo-succotash | 9c63f089ae6d06fc294594e754880bca267b26f9 | [
"MIT"
] | null | null | null | tests/util/conversion/test_datetime.py | danielschweigert/refactored-octo-succotash | 9c63f089ae6d06fc294594e754880bca267b26f9 | [
"MIT"
] | null | null | null | """
Unit test for utility class to convert datetimes.
"""
import unittest
from datetime import datetime
from pytz import timezone
from ros import DAY_FORMAT, STANDARD_TIMEZONE_STR
from ros.util.conversion.datetime import IntuitiveDateConverter
class IntuitiveDateConverterTestCase(unittest.TestCase):
def _run_and_evaluate_to_date(self,
conversion_input: object,
expected_output_type: type,
expected_output_value: object) -> None:
idc = IntuitiveDateConverter()
actual_output = idc.to_datetime(conversion_input)
actual_output_type = type(actual_output)
self.assertEqual(expected_output_type, actual_output_type)
self.assertEqual(expected_output_value, actual_output)
def _run_and_evaluate_to_day_str(self,
conversion_input: object,
expected_output_type: type,
expected_output_value: object) -> None:
idc = IntuitiveDateConverter()
actual_output = idc.to_day_str(conversion_input)
actual_output_type = type(actual_output)
self.assertEqual(expected_output_type, actual_output_type)
self.assertEqual(expected_output_value, actual_output)
def _run_and_evaluate_to_epoch_s(self,
conversion_input: object,
expected_output_type: type,
expected_output_value: object) -> None:
idc = IntuitiveDateConverter()
actual_output = idc.to_epoch_s(conversion_input)
actual_output_type = type(actual_output)
self.assertEqual(expected_output_type, actual_output_type)
self.assertEqual(expected_output_value, actual_output)
def test_conversion_datetime_to_datetime(self):
conversion_input = datetime.strptime('2021-01-01', DAY_FORMAT)
expected_output_type = datetime
expected_output_value = datetime.strptime('2021-01-01', DAY_FORMAT)
self._run_and_evaluate_to_date(conversion_input, expected_output_type, expected_output_value)
def test_conversion_day_str_to_datetime(self):
conversion_input = '2021-01-01'
expected_output_type = datetime
expected_output_value = datetime.strptime('2021-01-01', DAY_FORMAT)
self._run_and_evaluate_to_date(conversion_input, expected_output_type, expected_output_value)
def test_conversion_epoch_s_to_datetime(self):
conversion_input = datetime.strptime('2021-01-01', DAY_FORMAT).astimezone(timezone(STANDARD_TIMEZONE_STR))\
.timestamp()
expected_output_type = datetime
expected_output_value = datetime.strptime('2021-01-01', DAY_FORMAT).astimezone(timezone(STANDARD_TIMEZONE_STR))
self._run_and_evaluate_to_date(conversion_input, expected_output_type, expected_output_value)
def test_conversion_epoch_ms_to_datetime(self):
conversion_input = datetime.strptime('2021-01-01', DAY_FORMAT).astimezone(timezone(STANDARD_TIMEZONE_STR))\
.timestamp() * 1000
expected_output_type = datetime
expected_output_value = datetime.strptime('2021-01-01', DAY_FORMAT).astimezone(timezone(STANDARD_TIMEZONE_STR))
self._run_and_evaluate_to_date(conversion_input, expected_output_type, expected_output_value)
def test_conversion_datetime_to_day_str(self):
conversion_input = datetime.strptime('2021-01-01', DAY_FORMAT)
expected_output_type = str
expected_output_value = '2021-01-01'
self._run_and_evaluate_to_day_str(conversion_input, expected_output_type, expected_output_value)
def test_conversion_day_str_to_day_str(self):
conversion_input = '2021-01-01'
expected_output_type = str
expected_output_value = '2021-01-01'
self._run_and_evaluate_to_day_str(conversion_input, expected_output_type, expected_output_value)
def test_conversion_epoch_s_to_day_str(self):
conversion_input = datetime.strptime('2021-01-01', DAY_FORMAT).astimezone(timezone(STANDARD_TIMEZONE_STR))\
.timestamp()
expected_output_type = str
expected_output_value = '2021-01-01'
self._run_and_evaluate_to_day_str(conversion_input, expected_output_type, expected_output_value)
def test_conversion_epoch_ms_to_day_str(self):
conversion_input = datetime.strptime('2021-01-01', DAY_FORMAT).astimezone(timezone(STANDARD_TIMEZONE_STR))\
.timestamp() * 1000
expected_output_type = str
expected_output_value = '2021-01-01'
self._run_and_evaluate_to_day_str(conversion_input, expected_output_type, expected_output_value)
def test_conversion_day_str_to_epoch_s(self):
conversion_input = '2021-01-01'
expected_output_type = float
expected_output_value = datetime.strptime('2021-01-01', DAY_FORMAT).timestamp()
self._run_and_evaluate_to_epoch_s(conversion_input, expected_output_type, expected_output_value)
def test_conversion_epoch_s_to_epoch_s(self):
conversion_input = datetime.strptime('2021-01-01', DAY_FORMAT).astimezone(timezone(STANDARD_TIMEZONE_STR))\
.timestamp()
expected_output_type = float
expected_output_value = datetime.strptime('2021-01-01', DAY_FORMAT).astimezone(timezone(STANDARD_TIMEZONE_STR))\
.timestamp()
self._run_and_evaluate_to_epoch_s(conversion_input, expected_output_type, expected_output_value)
conversion_input = datetime.strptime('2021-01-01', DAY_FORMAT).astimezone(timezone(STANDARD_TIMEZONE_STR))\
.timestamp()
expected_output_type = float
expected_output_value = datetime.strptime('2021-01-01', DAY_FORMAT).timestamp()
self._run_and_evaluate_to_epoch_s(conversion_input, expected_output_type, expected_output_value)
def test_conversion_epoch_ms_to_epoch_s(self):
conversion_input = datetime.strptime('2021-01-01', DAY_FORMAT).astimezone(timezone(STANDARD_TIMEZONE_STR))\
.timestamp() * 1000
expected_output_type = float
expected_output_value = datetime.strptime('2021-01-01', DAY_FORMAT).timestamp()
self._run_and_evaluate_to_epoch_s(conversion_input, expected_output_type, expected_output_value)
conversion_input = datetime.strptime('2021-01-01', DAY_FORMAT).astimezone(timezone(STANDARD_TIMEZONE_STR))\
.timestamp() * 1000
expected_output_type = float
expected_output_value = datetime.strptime('2021-01-01', DAY_FORMAT).astimezone(timezone(STANDARD_TIMEZONE_STR))\
.timestamp()
self._run_and_evaluate_to_epoch_s(conversion_input, expected_output_type, expected_output_value)
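# Usage sketch for the converter under test (behaviour inferred from the
# assertions above; the epoch value is illustrative and timezone-dependent):
#
#     idc = IntuitiveDateConverter()
#     idc.to_datetime('2021-01-01')   # -> datetime(2021, 1, 1, 0, 0)
#     idc.to_day_str(1609459200.0)    # -> '2021-01-01' in STANDARD_TIMEZONE_STR
#     idc.to_epoch_s('2021-01-01')    # -> float epoch seconds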
| 43.474684 | 120 | 0.716116 | 820 | 6,869 | 5.531707 | 0.060976 | 0.197531 | 0.126984 | 0.092152 | 0.942681 | 0.926587 | 0.921296 | 0.921296 | 0.921296 | 0.903439 | 0 | 0.041093 | 0.206435 | 6,869 | 157 | 121 | 43.751592 | 0.791048 | 0.007134 | 0 | 0.783019 | 0 | 0 | 0.038168 | 0 | 0 | 0 | 0 | 0 | 0.056604 | 1 | 0.132075 | false | 0 | 0.04717 | 0 | 0.188679 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3da102efd54175fd52dbd892de9b55ecac99d206 | 7,625 | py | Python | nsupdate/api/_tests/test_api.py | cyroxx/nsupdate.info | f963672c4284d958255518f2ef6a44820e9109b9 | [
"BSD-3-Clause"
] | 1 | 2020-09-06T09:29:57.000Z | 2020-09-06T09:29:57.000Z | nsupdate/api/_tests/test_api.py | cyroxx/nsupdate.info | f963672c4284d958255518f2ef6a44820e9109b9 | [
"BSD-3-Clause"
] | null | null | null | nsupdate/api/_tests/test_api.py | cyroxx/nsupdate.info | f963672c4284d958255518f2ef6a44820e9109b9 | [
"BSD-3-Clause"
] | null | null | null | """
Tests for api package.
"""
from django.core.urlresolvers import reverse
from nsupdate.main.dnstools import query_ns
from nsupdate.main.models import Domain
from conftest import TESTDOMAIN, TEST_HOST, TEST_HOST2, TEST_SECRET, TEST_SECRET2
USERNAME = 'test'
PASSWORD = 'pass'
BASEDOMAIN = "nsupdate.info"
HOSTNAME = 'nsupdate-ddns-client-unittest.' + BASEDOMAIN
def test_myip(client):
response = client.get(reverse('myip'))
assert response.status_code == 200
assert response.content in [b'127.0.0.1', b'::1']
def test_nic_update_noauth(client):
response = client.get(reverse('nic_update'))
assert response.status_code == 401
assert response.content == b'badauth'
def make_basic_auth_header(username, password):
import base64
return b'Basic ' + base64.b64encode((username + ':' + password).encode('utf-8'))
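# For example, make_basic_auth_header('user', 'pass') returns
# b'Basic dXNlcjpwYXNz' -- 'user:pass' base64-encoded, exactly what an HTTP
# client sends in the Authorization header for Basic auth.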
def test_nic_update_badauth(client):
response = client.get(reverse('nic_update'),
HTTP_AUTHORIZATION=make_basic_auth_header(TEST_HOST, "wrong"))
assert response.status_code == 401
assert response.content == b'badauth'
def test_nic_update_authorized_nonexistent_host(client):
response = client.get(reverse('nic_update') + '?hostname=nonexistent.nsupdate.info',
HTTP_AUTHORIZATION=make_basic_auth_header(TEST_HOST, TEST_SECRET))
assert response.status_code == 200
# we must not get this updated, it doesn't exist in the database:
assert response.content == b'nohost'
def test_nic_update_authorized_foreign_host(client):
response = client.get(reverse('nic_update') + '?hostname=%s' % TEST_HOST2,
HTTP_AUTHORIZATION=make_basic_auth_header(TEST_HOST, TEST_SECRET))
assert response.status_code == 200
# we must not get this updated, this is a host of some other user!
assert response.content == b'nohost'
def test_nic_update_authorized_not_fqdn_hostname(client):
response = client.get(reverse('nic_update') + '?hostname=test',
HTTP_AUTHORIZATION=make_basic_auth_header(TEST_HOST, TEST_SECRET))
assert response.status_code == 200
assert response.content == b'notfqdn'
def test_nic_update_authorized_not_fqdn_username(client):
response = client.get(reverse('nic_update'),
HTTP_AUTHORIZATION=make_basic_auth_header('test', TEST_SECRET))
assert response.status_code == 200
assert response.content == b'notfqdn'
def test_nic_update_authorized(client):
response = client.get(reverse('nic_update'),
HTTP_AUTHORIZATION=make_basic_auth_header(TEST_HOST, TEST_SECRET))
assert response.status_code == 200
# we don't care whether it is nochg or good, but should be one of them:
content = response.content.decode('utf-8')
assert content.startswith('good ') or content.startswith('nochg ')
def test_nic_update_authorized_ns_unavailable(client):
d = Domain.objects.get(domain=TESTDOMAIN)
d.available = False # simulate DNS unavailability
d.save()
response = client.get(reverse('nic_update'),
HTTP_AUTHORIZATION=make_basic_auth_header(TEST_HOST, TEST_SECRET))
assert response.status_code == 200
assert response.content == b'dnserr'
def test_nic_update_authorized_myip(client):
response = client.get(reverse('nic_update') + '?myip=4.3.2.1',
HTTP_AUTHORIZATION=make_basic_auth_header(TEST_HOST, TEST_SECRET))
assert response.status_code == 200
# we don't care whether it is nochg or good, but should be the ip from myip=...:
assert response.content in [b'good 4.3.2.1', b'nochg 4.3.2.1']
response = client.get(reverse('nic_update') + '?myip=1.2.3.4',
HTTP_AUTHORIZATION=make_basic_auth_header(TEST_HOST, TEST_SECRET))
assert response.status_code == 200
# must be good (was different IP)
assert response.content == b'good 1.2.3.4'
response = client.get(reverse('nic_update') + '?myip=1.2.3.4',
HTTP_AUTHORIZATION=make_basic_auth_header(TEST_HOST, TEST_SECRET))
assert response.status_code == 200
# must be nochg (was same IP)
assert response.content == b'nochg 1.2.3.4'
def test_nic_update_authorized_update_other_services(client):
response = client.get(reverse('nic_update') + '?myip=4.3.2.1',
HTTP_AUTHORIZATION=make_basic_auth_header(TEST_HOST, TEST_SECRET))
assert response.status_code == 200
# we don't care whether it is nochg or good, but should be the ip from myip=...:
assert response.content in [b'good 4.3.2.1', b'nochg 4.3.2.1']
response = client.get(reverse('nic_update') + '?myip=1.2.3.4',
HTTP_AUTHORIZATION=make_basic_auth_header(TEST_HOST, TEST_SECRET))
assert response.status_code == 200
# must be good (was different IP)
assert response.content == b'good 1.2.3.4'
# now check if it updated the other service also:
assert query_ns(HOSTNAME, 'A') == '1.2.3.4'
response = client.get(reverse('nic_update') + '?myip=2.3.4.5',
HTTP_AUTHORIZATION=make_basic_auth_header(TEST_HOST, TEST_SECRET))
assert response.status_code == 200
# must be good (was different IP)
assert response.content == b'good 2.3.4.5'
# now check if it updated the other service also:
assert query_ns(HOSTNAME, 'A') == '2.3.4.5'
def test_nic_update_authorized_badagent(client, settings):
settings.BAD_AGENTS = ['foo', 'bad_agent', 'bar', ]
response = client.get(reverse('nic_update'),
HTTP_AUTHORIZATION=make_basic_auth_header(TEST_HOST, TEST_SECRET),
HTTP_USER_AGENT='bad_agent')
assert response.status_code == 200
assert response.content == b'badagent'
def test_nic_update_session_nosession(client):
response = client.get(reverse('nic_update_authorized'))
assert response.status_code == 302 # redirects to login view
def test_nic_update_session_no_hostname(client):
client.login(username=USERNAME, password=PASSWORD)
response = client.get(reverse('nic_update_authorized'))
assert response.status_code == 200
assert response.content == b'nohost' # we did not tell which host
def test_nic_update_session(client):
client.login(username=USERNAME, password=PASSWORD)
response = client.get(reverse('nic_update_authorized') + '?hostname=%s' % (TEST_HOST, ))
assert response.status_code == 200
content = response.content.decode('utf-8')
assert content.startswith('good ') or content.startswith('nochg ')
def test_nic_update_session_myip(client):
client.login(username=USERNAME, password=PASSWORD)
response = client.get(reverse('nic_update_authorized') + '?hostname=%s&myip=%s' % (TEST_HOST, '1.2.3.4'))
assert response.status_code == 200
content = response.content.decode('utf-8')
assert content.startswith('good 1.2.3.4') or content.startswith('nochg 1.2.3.4')
def test_nic_update_session_foreign_host(client):
client.login(username=USERNAME, password=PASSWORD)
response = client.get(reverse('nic_update_authorized') + '?hostname=%s' % TEST_HOST2)
assert response.status_code == 200
# we must not get this updated, this is a host of some other user!
assert response.content == b'nohost'
def test_detect_ip_invalid_session(client):
response = client.get(reverse('detectip', args=('invalid_session_id', )))
assert response.status_code == 204
def test_ajax_get_ips(client):
response = client.get(reverse('ajax_get_ips'))
assert response.status_code == 200
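# For reference, the dyndns2-style response codes exercised above:
# 'badauth' (missing/wrong credentials), 'nohost' (nonexistent or foreign
# host), 'notfqdn' (hostname or username not fully qualified), 'dnserr'
# (nameserver unavailable), 'badagent' (blacklisted user agent), and
# 'good <ip>' / 'nochg <ip>' for successful updates.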
| 41.216216 | 109 | 0.70059 | 1,051 | 7,625 | 4.862036 | 0.144624 | 0.109589 | 0.076517 | 0.108023 | 0.801566 | 0.742466 | 0.735029 | 0.723483 | 0.704892 | 0.660665 | 0 | 0.026766 | 0.186623 | 7,625 | 184 | 110 | 41.440217 | 0.797162 | 0.097574 | 0 | 0.524194 | 0 | 0 | 0.122175 | 0.024785 | 0 | 0 | 0 | 0 | 0.362903 | 1 | 0.16129 | false | 0.056452 | 0.040323 | 0 | 0.209677 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
9ab4481337a4d8baa5bf8fb94d5ad20ebcede853 | 45,633 | py | Python | PDA/PDA_Industry/app.py | gaufung/CodeBase | 0292b06cfe002b3ad0299e43bb51192816a02c74 | [
"MIT"
] | 4 | 2020-07-21T03:45:48.000Z | 2021-12-20T09:37:10.000Z | PDA/PDA_Industry/app.py | wsgan001/CodeBase | 0292b06cfe002b3ad0299e43bb51192816a02c74 | [
"MIT"
] | 1 | 2021-05-27T03:24:38.000Z | 2021-05-27T03:24:38.000Z | PDA/PDA_Industry/app.py | wsgan001/CodeBase | 0292b06cfe002b3ad0299e43bb51192816a02c74 | [
"MIT"
] | 4 | 2017-04-13T13:53:38.000Z | 2022-03-01T13:28:48.000Z | # -*- coding:utf8 -*-
import DataRead
import xlrd
import config
import LMDI
import logging
from xlwt import *
import math
import operator
import Algorithm
import numpy as np
import pandas as pd
from SinglePeriodAAM import Spaam
from MultiPeriodAAM import Mpaam
import WriteData
logging.basicConfig(level=logging.ERROR)
class AppLmdi(object):
'''
    the LMDI decomposition application
'''
def __init__(self):
dmus_2006 = DataRead.read_dmus(config.PRO_2006_COL, config.SHEET_2006)
dmus_2007 = DataRead.read_dmus(config.PRO_2007_COL, config.SHEET_2007)
dmus_2008 = DataRead.read_dmus(config.PRO_2008_COL, config.SHEET_2008)
dmus_2009 = DataRead.read_dmus(config.PRO_2009_COL, config.SHEET_2009)
dmus_2010 = DataRead.read_dmus(config.PRO_2010_COL, config.SHEET_2010)
dmus_2011 = DataRead.read_dmus(config.PRO_2011_COL, config.SHEET_2011)
dmus_2012 = DataRead.read_dmus(config.PRO_2012_COL, config.SHEET_2012)
dmus_2013 = DataRead.read_dmus(config.PRO_2013_COL, config.SHEET_2013)
dmus_2014 = DataRead.read_dmus(config.PRO_2014_COL, config.SHEET_2014)
self.lmdi_2006_2007 = LMDI.Lmdi(dmus_2006, dmus_2007, '2006-2007')
self.lmdi_2007_2008 = LMDI.Lmdi(dmus_2007, dmus_2008, '2007-2008')
self.lmdi_2008_2009 = LMDI.Lmdi(dmus_2008, dmus_2009, '2008-2009')
self.lmdi_2009_2010 = LMDI.Lmdi(dmus_2009, dmus_2010, '2009-2010')
self.lmdi_2010_2011 = LMDI.Lmdi(dmus_2010, dmus_2011, '2010-2011')
self.lmdi_2011_2012 = LMDI.Lmdi(dmus_2011, dmus_2012, '2011-2012')
self.lmdi_2012_2013 = LMDI.Lmdi(dmus_2012, dmus_2013, '2012-2013')
self.lmdi_2013_2014 = LMDI.Lmdi(dmus_2013, dmus_2014, '2013-2014')
self.spaam_2006_2007 = Spaam(dmus_2006, dmus_2007, '2006-2007')
self.spaam_2007_2008 = Spaam(dmus_2007, dmus_2008, '2007-2008')
self.spaam_2008_2009 = Spaam(dmus_2008, dmus_2009, '2008-2009')
self.spaam_2009_2010 = Spaam(dmus_2009, dmus_2010, '2009-2010')
self.spaam_2010_2011 = Spaam(dmus_2010, dmus_2011, '2010-2011')
self.spaam_2011_2012 = Spaam(dmus_2011, dmus_2012, '2011-2012')
self.spaam_2012_2013 = Spaam(dmus_2012, dmus_2013, '2012-2013')
self.spaam_2013_2014 = Spaam(dmus_2013, dmus_2014, '2013-2014')
self.mpaam_2006_2007 = Mpaam([dmus_2006, dmus_2007], '2006-2007')
self.mpaam_2006_2008 = Mpaam([dmus_2006, dmus_2007, dmus_2008], '2006-2008')
self.mpaam_2006_2009 = Mpaam([dmus_2006, dmus_2007, dmus_2008, dmus_2009,
], '2006-2009')
self.mpaam_2006_2010 = Mpaam([dmus_2006, dmus_2007, dmus_2008, dmus_2009,
dmus_2010], '2006-2010')
self.mpaam_2006_2011 = Mpaam([dmus_2006, dmus_2007, dmus_2008, dmus_2009,
dmus_2010, dmus_2011], '2006-2011')
self.mpaam_2006_2012 = Mpaam([dmus_2006, dmus_2007, dmus_2008, dmus_2009,
dmus_2010, dmus_2011, dmus_2012], '2006-2012')
self.mpaam_2006_2013 = Mpaam([dmus_2006, dmus_2007, dmus_2008, dmus_2009,
dmus_2010, dmus_2011, dmus_2012, dmus_2013], '2006-2013')
self.mpaam_2006_2014 = Mpaam([dmus_2006, dmus_2007, dmus_2008, dmus_2009,
dmus_2010, dmus_2011, dmus_2012, dmus_2013,
dmus_2014], '2006-2014')
self.province_names = self.lmdi_2006_2007.province_names
def write_lmdi(self):
'''
        write the single-factor LMDI results
'''
with WriteData.WriteLmdiData('LMDI结果.xls', self.lmdi_2006_2007,
self.lmdi_2007_2008, self.lmdi_2008_2009,
self.lmdi_2009_2010, self.lmdi_2010_2011,
self.lmdi_2011_2012, self.lmdi_2012_2013,
self.lmdi_2013_2014) as f:
f.write()
def write_single_attribute(self):
'''
        write the single-period attribution results
'''
with WriteData.WriteSpaamData('单期归因.xls', self.spaam_2006_2007,
self.spaam_2007_2008, self.spaam_2008_2009,
self.spaam_2009_2010, self.spaam_2010_2011,
self.spaam_2011_2012, self.spaam_2012_2013,
self.spaam_2013_2014) as f:
f.write()
def write_lmdi_single(self, sheet):
'''
write single period lmdi
'''
columns = ['Period', 'Dcef', 'Demx', 'Dpei', 'Dpis', 'Disg',
'Deue', 'Dest', 'Dyoe', 'Dyct']
self._write_row(sheet, 0, columns)
self._write_row(sheet, 1, [self.spaam_2006_2007.name] + self.spaam_2006_2007.indexes)
self._write_row(sheet, 2, [self.spaam_2007_2008.name] + self.spaam_2007_2008.indexes)
self._write_row(sheet, 3, [self.spaam_2008_2009.name] + self.spaam_2008_2009.indexes)
self._write_row(sheet, 4, [self.spaam_2009_2010.name] + self.spaam_2009_2010.indexes)
self._write_row(sheet, 5, [self.spaam_2010_2011.name] + self.spaam_2010_2011.indexes)
self._write_row(sheet, 6, [self.spaam_2011_2012.name] + self.spaam_2011_2012.indexes)
self._write_row(sheet, 7, [self.spaam_2012_2013.name] + self.spaam_2012_2013.indexes)
self._write_row(sheet, 8, [self.spaam_2013_2014.name] + self.spaam_2013_2014.indexes)
def write_lmdi_multi(self, sheet):
'''
write multi period lmdi
'''
columns = ['Period', 'Dcef', 'Demx', 'Dpei', 'Dpis', 'Disg',
'Deue', 'Dest', 'Dyoe', 'Dyct']
self._write_row(sheet, 0, columns)
self._write_row(sheet, 1, ['2007'] + self.mpaam_2006_2014.indexes(1))
self._write_row(sheet, 2, ['2008'] + self.mpaam_2006_2014.indexes(2))
self._write_row(sheet, 3, ['2009'] + self.mpaam_2006_2014.indexes(3))
self._write_row(sheet, 4, ['2010'] + self.mpaam_2006_2014.indexes(4))
self._write_row(sheet, 5, ['2011'] + self.mpaam_2006_2014.indexes(5))
self._write_row(sheet, 6, ['2012'] + self.mpaam_2006_2014.indexes(6))
self._write_row(sheet, 7, ['2013'] + self.mpaam_2006_2014.indexes(7))
self._write_row(sheet, 8, ['2014'] + self.mpaam_2006_2014.indexes(8))
def _write_row(self, sheet, row, values):
'''
write a row
'''
column = 0
for value in values:
sheet.write(row, column, label=value)
column += 1
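    # For example, self._write_row(sheet, 0, ['Period', 'Dcef', 'Demx'])
    # fills the first three cells of row 0; _write_column (defined further
    # below) is the transposed counterpart used for the per-province sheets.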
def write_single_attribution(self, save_file_name):
workbook = Workbook(encoding='utf8')
self._write_single_cef(workbook.add_sheet('cef'))
self._write_single_emx(workbook.add_sheet('emx'))
self._write_single_pei(workbook.add_sheet('pei'))
self._write_single_pis(workbook.add_sheet('pis'))
self._write_single_isg(workbook.add_sheet('isg'))
self._write_single_eue(workbook.add_sheet('eue'))
self._write_single_est(workbook.add_sheet('est'))
self._write_single_yoe(workbook.add_sheet('yoe'))
self._write_single_yct(workbook.add_sheet('yct'))
workbook.save(save_file_name)
def _write_single_cef(self, sheet):
'''
write single cef
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item * 100 for item
in self.spaam_2006_2007.cef_attributions])
self._write_column(sheet, 2, ['2008'] + [item * 100 for item
in self.spaam_2007_2008.cef_attributions])
self._write_column(sheet, 3, ['2009'] + [item * 100 for item
in self.spaam_2008_2009.cef_attributions])
self._write_column(sheet, 4, ['2010'] + [item * 100 for item
in self.spaam_2009_2010.cef_attributions])
self._write_column(sheet, 5, ['2011'] + [item * 100 for item
in self.spaam_2010_2011.cef_attributions])
self._write_column(sheet, 6, ['2012'] + [item * 100 for item
in self.spaam_2011_2012.cef_attributions])
self._write_column(sheet, 7, ['2013'] + [item * 100 for item
in self.spaam_2012_2013.cef_attributions])
self._write_column(sheet, 8, ['2014'] + [item * 100 for item
in self.spaam_2013_2014.cef_attributions])
def _write_single_emx(self, sheet):
'''
write single emx
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item * 100 for item
in self.spaam_2006_2007.emx_attributions])
self._write_column(sheet, 2, ['2008'] + [item * 100 for item
in self.spaam_2007_2008.emx_attributions])
self._write_column(sheet, 3, ['2009'] + [item * 100 for item
in self.spaam_2008_2009.emx_attributions])
self._write_column(sheet, 4, ['2010'] + [item * 100 for item
in self.spaam_2009_2010.emx_attributions])
self._write_column(sheet, 5, ['2011'] + [item * 100 for item
in self.spaam_2010_2011.emx_attributions])
self._write_column(sheet, 6, ['2012'] + [item * 100 for item
in self.spaam_2011_2012.emx_attributions])
self._write_column(sheet, 7, ['2013'] + [item * 100 for item
in self.spaam_2012_2013.emx_attributions])
self._write_column(sheet, 8, ['2014'] + [item * 100 for item
in self.spaam_2013_2014.emx_attributions])
def _write_single_pei(self, sheet):
'''
        write single pei
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item * 100 for item
in self.spaam_2006_2007.pei_attributions])
self._write_column(sheet, 2, ['2008'] + [item * 100 for item
in self.spaam_2007_2008.pei_attributions])
self._write_column(sheet, 3, ['2009'] + [item * 100 for item
in self.spaam_2008_2009.pei_attributions])
self._write_column(sheet, 4, ['2010'] + [item * 100 for item
in self.spaam_2009_2010.pei_attributions])
self._write_column(sheet, 5, ['2011'] + [item * 100 for item
in self.spaam_2010_2011.pei_attributions])
self._write_column(sheet, 6, ['2012'] + [item * 100 for item
in self.spaam_2011_2012.pei_attributions])
self._write_column(sheet, 7, ['2013'] + [item * 100 for item
in self.spaam_2012_2013.pei_attributions])
self._write_column(sheet, 8, ['2014'] + [item * 100 for item
in self.spaam_2013_2014.pei_attributions])
def _write_single_pis(self, sheet):
'''
        write single pis
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item * 100 for item
in self.spaam_2006_2007.pis_attributions])
self._write_column(sheet, 2, ['2008'] + [item * 100 for item
in self.spaam_2007_2008.pis_attributions])
self._write_column(sheet, 3, ['2009'] + [item * 100 for item
in self.spaam_2008_2009.pis_attributions])
self._write_column(sheet, 4, ['2010'] + [item * 100 for item
in self.spaam_2009_2010.pis_attributions])
self._write_column(sheet, 5, ['2011'] + [item * 100 for item
in self.spaam_2010_2011.pis_attributions])
self._write_column(sheet, 6, ['2012'] + [item * 100 for item
in self.spaam_2011_2012.pis_attributions])
self._write_column(sheet, 7, ['2013'] + [item * 100 for item
in self.spaam_2012_2013.pis_attributions])
self._write_column(sheet, 8, ['2014'] + [item * 100 for item
in self.spaam_2013_2014.pis_attributions])
def _write_single_isg(self, sheet):
'''
        write single isg
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item * 100 for item
in self.spaam_2006_2007.isg_attributions])
self._write_column(sheet, 2, ['2008'] + [item * 100 for item
in self.spaam_2007_2008.isg_attributions])
self._write_column(sheet, 3, ['2009'] + [item * 100 for item
in self.spaam_2008_2009.isg_attributions])
self._write_column(sheet, 4, ['2010'] + [item * 100 for item
in self.spaam_2009_2010.isg_attributions])
self._write_column(sheet, 5, ['2011'] + [item * 100 for item
in self.spaam_2010_2011.isg_attributions])
self._write_column(sheet, 6, ['2012'] + [item * 100 for item
in self.spaam_2011_2012.isg_attributions])
self._write_column(sheet, 7, ['2013'] + [item * 100 for item
in self.spaam_2012_2013.isg_attributions])
self._write_column(sheet, 8, ['2014'] + [item * 100 for item
in self.spaam_2013_2014.isg_attributions])
def _write_single_eue(self, sheet):
'''
        write single eue
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item * 100 for item
in self.spaam_2006_2007.eue_attributions])
self._write_column(sheet, 2, ['2008'] + [item * 100 for item
in self.spaam_2007_2008.eue_attributions])
self._write_column(sheet, 3, ['2009'] + [item * 100 for item
in self.spaam_2008_2009.eue_attributions])
self._write_column(sheet, 4, ['2010'] + [item * 100 for item
in self.spaam_2009_2010.eue_attributions])
self._write_column(sheet, 5, ['2011'] + [item * 100 for item
in self.spaam_2010_2011.eue_attributions])
self._write_column(sheet, 6, ['2012'] + [item * 100 for item
in self.spaam_2011_2012.eue_attributions])
self._write_column(sheet, 7, ['2013'] + [item * 100 for item
in self.spaam_2012_2013.eue_attributions])
self._write_column(sheet, 8, ['2014'] + [item * 100 for item
in self.spaam_2013_2014.eue_attributions])
def _write_single_est(self, sheet):
'''
        write single est
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item * 100 for item
in self.spaam_2006_2007.est_attributions])
self._write_column(sheet, 2, ['2008'] + [item * 100 for item
in self.spaam_2007_2008.est_attributions])
self._write_column(sheet, 3, ['2009'] + [item * 100 for item
in self.spaam_2008_2009.est_attributions])
self._write_column(sheet, 4, ['2010'] + [item * 100 for item
in self.spaam_2009_2010.est_attributions])
self._write_column(sheet, 5, ['2011'] + [item * 100 for item
in self.spaam_2010_2011.est_attributions])
self._write_column(sheet, 6, ['2012'] + [item * 100 for item
in self.spaam_2011_2012.est_attributions])
self._write_column(sheet, 7, ['2013'] + [item * 100 for item
in self.spaam_2012_2013.est_attributions])
self._write_column(sheet, 8, ['2014'] + [item * 100 for item
in self.spaam_2013_2014.est_attributions])
def _write_single_yoe(self, sheet):
'''
        write single yoe
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item * 100 for item
in self.spaam_2006_2007.yoe_attributions])
self._write_column(sheet, 2, ['2008'] + [item * 100 for item
in self.spaam_2007_2008.yoe_attributions])
self._write_column(sheet, 3, ['2009'] + [item * 100 for item
in self.spaam_2008_2009.yoe_attributions])
self._write_column(sheet, 4, ['2010'] + [item * 100 for item
in self.spaam_2009_2010.yoe_attributions])
self._write_column(sheet, 5, ['2011'] + [item * 100 for item
in self.spaam_2010_2011.yoe_attributions])
self._write_column(sheet, 6, ['2012'] + [item * 100 for item
in self.spaam_2011_2012.yoe_attributions])
self._write_column(sheet, 7, ['2013'] + [item * 100 for item
in self.spaam_2012_2013.yoe_attributions])
self._write_column(sheet, 8, ['2014'] + [item * 100 for item
in self.spaam_2013_2014.yoe_attributions])
def _write_single_yct(self, sheet):
'''
        write single yct
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item * 100 for item
in self.spaam_2006_2007.yct_attributions])
self._write_column(sheet, 2, ['2008'] + [item * 100 for item
in self.spaam_2007_2008.yct_attributions])
self._write_column(sheet, 3, ['2009'] + [item * 100 for item
in self.spaam_2008_2009.yct_attributions])
self._write_column(sheet, 4, ['2010'] + [item * 100 for item
in self.spaam_2009_2010.yct_attributions])
self._write_column(sheet, 5, ['2011'] + [item * 100 for item
in self.spaam_2010_2011.yct_attributions])
self._write_column(sheet, 6, ['2012'] + [item * 100 for item
in self.spaam_2011_2012.yct_attributions])
self._write_column(sheet, 7, ['2013'] + [item * 100 for item
in self.spaam_2012_2013.yct_attributions])
self._write_column(sheet, 8, ['2014'] + [item * 100 for item
in self.spaam_2013_2014.yct_attributions])
def write_multi_attribution(self, save_file_name):
'''
write multi attribution
'''
workbook = Workbook(encoding='utf8')
self._write_multi_cef(workbook.add_sheet('cef'))
self._write_multi_emx(workbook.add_sheet('emx'))
self._write_multi_pei(workbook.add_sheet('pei'))
self._write_multi_pis(workbook.add_sheet('pis'))
self._write_multi_isg(workbook.add_sheet('isg'))
self._write_multi_eue(workbook.add_sheet('eue'))
self._write_multi_est(workbook.add_sheet('est'))
self._write_multi_yoe(workbook.add_sheet('yoe'))
self._write_multi_yct(workbook.add_sheet('yct'))
workbook.save(save_file_name)
def _write_multi_cef(self, sheet):
'''
write multi cef
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item *100 for item
in self.mpaam_2006_2007.cef()])
self._write_column(sheet, 2, ['2008'] + [item *100 for item
in self.mpaam_2006_2008.cef()])
self._write_column(sheet, 3, ['2009'] + [item *100 for item
in self.mpaam_2006_2009.cef()])
self._write_column(sheet, 4, ['2010'] + [item *100 for item
in self.mpaam_2006_2010.cef()])
self._write_column(sheet, 5, ['2011'] + [item *100 for item
in self.mpaam_2006_2011.cef()])
self._write_column(sheet, 6, ['2012'] + [item *100 for item
in self.mpaam_2006_2012.cef()])
self._write_column(sheet, 7, ['2013'] + [item *100 for item
in self.mpaam_2006_2013.cef()])
self._write_column(sheet, 8, ['2014'] + [item *100 for item
in self.mpaam_2006_2014.cef()])
def _write_multi_emx(self, sheet):
'''
write multi emx
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item *100 for item
in self.mpaam_2006_2007.emx()])
self._write_column(sheet, 2, ['2008'] + [item *100 for item
in self.mpaam_2006_2008.emx()])
self._write_column(sheet, 3, ['2009'] + [item *100 for item
in self.mpaam_2006_2009.emx()])
self._write_column(sheet, 4, ['2010'] + [item *100 for item
in self.mpaam_2006_2010.emx()])
self._write_column(sheet, 5, ['2011'] + [item *100 for item
in self.mpaam_2006_2011.emx()])
self._write_column(sheet, 6, ['2012'] + [item *100 for item
in self.mpaam_2006_2012.emx()])
self._write_column(sheet, 7, ['2013'] + [item *100 for item
in self.mpaam_2006_2013.emx()])
self._write_column(sheet, 8, ['2014'] + [item *100 for item
in self.mpaam_2006_2014.emx()])
def _write_multi_pei(self, sheet):
'''
        write multi pei
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item *100 for item
in self.mpaam_2006_2007.pei()])
self._write_column(sheet, 2, ['2008'] + [item *100 for item
in self.mpaam_2006_2008.pei()])
self._write_column(sheet, 3, ['2009'] + [item *100 for item
in self.mpaam_2006_2009.pei()])
self._write_column(sheet, 4, ['2010'] + [item *100 for item
in self.mpaam_2006_2010.pei()])
self._write_column(sheet, 5, ['2011'] + [item *100 for item
in self.mpaam_2006_2011.pei()])
self._write_column(sheet, 6, ['2012'] + [item *100 for item
in self.mpaam_2006_2012.pei()])
self._write_column(sheet, 7, ['2013'] + [item *100 for item
in self.mpaam_2006_2013.pei()])
self._write_column(sheet, 8, ['2014'] + [item *100 for item
in self.mpaam_2006_2014.pei()])
def _write_multi_pis(self, sheet):
'''
        write multi pis
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item *100 for item
in self.mpaam_2006_2007.pis()])
self._write_column(sheet, 2, ['2008'] + [item *100 for item
in self.mpaam_2006_2008.pis()])
self._write_column(sheet, 3, ['2009'] + [item *100 for item
in self.mpaam_2006_2009.pis()])
self._write_column(sheet, 4, ['2010'] + [item *100 for item
in self.mpaam_2006_2010.pis()])
self._write_column(sheet, 5, ['2011'] + [item *100 for item
in self.mpaam_2006_2011.pis()])
self._write_column(sheet, 6, ['2012'] + [item *100 for item
in self.mpaam_2006_2012.pis()])
self._write_column(sheet, 7, ['2013'] + [item *100 for item
in self.mpaam_2006_2013.pis()])
self._write_column(sheet, 8, ['2014'] + [item *100 for item
in self.mpaam_2006_2014.pis()])
def _write_multi_isg(self, sheet):
'''
        write multi isg
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item *100 for item
in self.mpaam_2006_2007.isg()])
self._write_column(sheet, 2, ['2008'] + [item *100 for item
in self.mpaam_2006_2008.isg()])
self._write_column(sheet, 3, ['2009'] + [item *100 for item
in self.mpaam_2006_2009.isg()])
self._write_column(sheet, 4, ['2010'] + [item *100 for item
in self.mpaam_2006_2010.isg()])
self._write_column(sheet, 5, ['2011'] + [item *100 for item
in self.mpaam_2006_2011.isg()])
self._write_column(sheet, 6, ['2012'] + [item *100 for item
in self.mpaam_2006_2012.isg()])
self._write_column(sheet, 7, ['2013'] + [item *100 for item
in self.mpaam_2006_2013.isg()])
self._write_column(sheet, 8, ['2014'] + [item *100 for item
in self.mpaam_2006_2014.isg()])
def _write_multi_eue(self, sheet):
'''
        write multi eue
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item *100 for item
in self.mpaam_2006_2007.eue()])
self._write_column(sheet, 2, ['2008'] + [item *100 for item
in self.mpaam_2006_2008.eue()])
self._write_column(sheet, 3, ['2009'] + [item *100 for item
in self.mpaam_2006_2009.eue()])
self._write_column(sheet, 4, ['2010'] + [item *100 for item
in self.mpaam_2006_2010.eue()])
self._write_column(sheet, 5, ['2011'] + [item *100 for item
in self.mpaam_2006_2011.eue()])
self._write_column(sheet, 6, ['2012'] + [item *100 for item
in self.mpaam_2006_2012.eue()])
self._write_column(sheet, 7, ['2013'] + [item *100 for item
in self.mpaam_2006_2013.eue()])
self._write_column(sheet, 8, ['2014'] + [item *100 for item
in self.mpaam_2006_2014.eue()])
def _write_multi_est(self, sheet):
'''
        write multi est
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item *100 for item
in self.mpaam_2006_2007.est()])
self._write_column(sheet, 2, ['2008'] + [item *100 for item
in self.mpaam_2006_2008.est()])
self._write_column(sheet, 3, ['2009'] + [item *100 for item
in self.mpaam_2006_2009.est()])
self._write_column(sheet, 4, ['2010'] + [item *100 for item
in self.mpaam_2006_2010.est()])
self._write_column(sheet, 5, ['2011'] + [item *100 for item
in self.mpaam_2006_2011.est()])
self._write_column(sheet, 6, ['2012'] + [item *100 for item
in self.mpaam_2006_2012.est()])
self._write_column(sheet, 7, ['2013'] + [item *100 for item
in self.mpaam_2006_2013.est()])
self._write_column(sheet, 8, ['2014'] + [item *100 for item
in self.mpaam_2006_2014.est()])
def _write_multi_yoe(self, sheet):
'''
        write multi yoe
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item *100 for item
in self.mpaam_2006_2007.yoe()])
self._write_column(sheet, 2, ['2008'] + [item *100 for item
in self.mpaam_2006_2008.yoe()])
self._write_column(sheet, 3, ['2009'] + [item *100 for item
in self.mpaam_2006_2009.yoe()])
self._write_column(sheet, 4, ['2010'] + [item *100 for item
in self.mpaam_2006_2010.yoe()])
self._write_column(sheet, 5, ['2011'] + [item *100 for item
in self.mpaam_2006_2011.yoe()])
self._write_column(sheet, 6, ['2012'] + [item *100 for item
in self.mpaam_2006_2012.yoe()])
self._write_column(sheet, 7, ['2013'] + [item *100 for item
in self.mpaam_2006_2013.yoe()])
self._write_column(sheet, 8, ['2014'] + [item *100 for item
in self.mpaam_2006_2014.yoe()])
def _write_multi_yct(self, sheet):
'''
        write multi yct
'''
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007'] + [item *100 for item
in self.mpaam_2006_2007.yct()])
self._write_column(sheet, 2, ['2008'] + [item *100 for item
in self.mpaam_2006_2008.yct()])
self._write_column(sheet, 3, ['2009'] + [item *100 for item
in self.mpaam_2006_2009.yct()])
self._write_column(sheet, 4, ['2010'] + [item *100 for item
in self.mpaam_2006_2010.yct()])
self._write_column(sheet, 5, ['2011'] + [item *100 for item
in self.mpaam_2006_2011.yct()])
self._write_column(sheet, 6, ['2012'] + [item *100 for item
in self.mpaam_2006_2012.yct()])
self._write_column(sheet, 7, ['2013'] + [item *100 for item
in self.mpaam_2006_2013.yct()])
self._write_column(sheet, 8, ['2014'] + [item *100 for item
in self.mpaam_2006_2014.yct()])
def write_multi_lmdi(self, save_file_name):
'''
write multi lmdi
'''
workbook = Workbook(encoding='utf8')
self._write_lmid_cef(workbook.add_sheet('cef'))
self._write_lmid_emx(workbook.add_sheet('emx'))
self._write_lmid_pei(workbook.add_sheet('pei'))
self._write_lmid_pis(workbook.add_sheet('pis'))
self._write_lmid_isg(workbook.add_sheet('isg'))
self._write_lmid_eue(workbook.add_sheet('eue'))
self._write_lmid_est(workbook.add_sheet('est'))
self._write_lmid_yoe(workbook.add_sheet('yoe'))
self._write_lmid_yct(workbook.add_sheet('yct'))
workbook.save(save_file_name)
def _write_lmid_cef(self, sheet):
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007']+list(self.lmdi_2006_2007.cef()))
self._write_column(sheet, 2, ['2008']+list(self.lmdi_2007_2008.cef()))
self._write_column(sheet, 3, ['2009']+list(self.lmdi_2008_2009.cef()))
self._write_column(sheet, 4, ['2010']+list(self.lmdi_2009_2010.cef()))
self._write_column(sheet, 5, ['2011']+list(self.lmdi_2010_2011.cef()))
self._write_column(sheet, 6, ['2012']+list(self.lmdi_2011_2012.cef()))
self._write_column(sheet, 7, ['2013']+list(self.lmdi_2012_2013.cef()))
self._write_column(sheet, 8, ['2014']+list(self.lmdi_2013_2014.cef()))
def _write_lmid_emx(self, sheet):
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007']+list(self.lmdi_2006_2007.emx()))
self._write_column(sheet, 2, ['2008']+list(self.lmdi_2007_2008.emx()))
self._write_column(sheet, 3, ['2009']+list(self.lmdi_2008_2009.emx()))
self._write_column(sheet, 4, ['2010']+list(self.lmdi_2009_2010.emx()))
self._write_column(sheet, 5, ['2011']+list(self.lmdi_2010_2011.emx()))
self._write_column(sheet, 6, ['2012']+list(self.lmdi_2011_2012.emx()))
self._write_column(sheet, 7, ['2013']+list(self.lmdi_2012_2013.emx()))
self._write_column(sheet, 8, ['2014']+list(self.lmdi_2013_2014.emx()))
def _write_lmid_pei(self, sheet):
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007']+list(self.lmdi_2006_2007.pei()))
self._write_column(sheet, 2, ['2008']+list(self.lmdi_2007_2008.pei()))
self._write_column(sheet, 3, ['2009']+list(self.lmdi_2008_2009.pei()))
self._write_column(sheet, 4, ['2010']+list(self.lmdi_2009_2010.pei()))
self._write_column(sheet, 5, ['2011']+list(self.lmdi_2010_2011.pei()))
self._write_column(sheet, 6, ['2012']+list(self.lmdi_2011_2012.pei()))
self._write_column(sheet, 7, ['2013']+list(self.lmdi_2012_2013.pei()))
self._write_column(sheet, 8, ['2014']+list(self.lmdi_2013_2014.pei()))
def _write_lmid_pis(self, sheet):
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007']+list(self.lmdi_2006_2007.pis()))
self._write_column(sheet, 2, ['2008']+list(self.lmdi_2007_2008.pis()))
self._write_column(sheet, 3, ['2009']+list(self.lmdi_2008_2009.pis()))
self._write_column(sheet, 4, ['2010']+list(self.lmdi_2009_2010.pis()))
self._write_column(sheet, 5, ['2011']+list(self.lmdi_2010_2011.pis()))
self._write_column(sheet, 6, ['2012']+list(self.lmdi_2011_2012.pis()))
self._write_column(sheet, 7, ['2013']+list(self.lmdi_2012_2013.pis()))
self._write_column(sheet, 8, ['2014']+list(self.lmdi_2013_2014.pis()))
def _write_lmid_isg(self, sheet):
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007']+list(self.lmdi_2006_2007.isg()))
self._write_column(sheet, 2, ['2008']+list(self.lmdi_2007_2008.isg()))
self._write_column(sheet, 3, ['2009']+list(self.lmdi_2008_2009.isg()))
self._write_column(sheet, 4, ['2010']+list(self.lmdi_2009_2010.isg()))
self._write_column(sheet, 5, ['2011']+list(self.lmdi_2010_2011.isg()))
self._write_column(sheet, 6, ['2012']+list(self.lmdi_2011_2012.isg()))
self._write_column(sheet, 7, ['2013']+list(self.lmdi_2012_2013.isg()))
self._write_column(sheet, 8, ['2014']+list(self.lmdi_2013_2014.isg()))
def _write_lmid_eue(self, sheet):
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007']+list(self.lmdi_2006_2007.eue()))
self._write_column(sheet, 2, ['2008']+list(self.lmdi_2007_2008.eue()))
self._write_column(sheet, 3, ['2009']+list(self.lmdi_2008_2009.eue()))
self._write_column(sheet, 4, ['2010']+list(self.lmdi_2009_2010.eue()))
self._write_column(sheet, 5, ['2011']+list(self.lmdi_2010_2011.eue()))
self._write_column(sheet, 6, ['2012']+list(self.lmdi_2011_2012.eue()))
self._write_column(sheet, 7, ['2013']+list(self.lmdi_2012_2013.eue()))
self._write_column(sheet, 8, ['2014']+list(self.lmdi_2013_2014.eue()))
def _write_lmid_est(self, sheet):
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007']+list(self.lmdi_2006_2007.est()))
self._write_column(sheet, 2, ['2008']+list(self.lmdi_2007_2008.est()))
self._write_column(sheet, 3, ['2009']+list(self.lmdi_2008_2009.est()))
self._write_column(sheet, 4, ['2010']+list(self.lmdi_2009_2010.est()))
self._write_column(sheet, 5, ['2011']+list(self.lmdi_2010_2011.est()))
self._write_column(sheet, 6, ['2012']+list(self.lmdi_2011_2012.est()))
self._write_column(sheet, 7, ['2013']+list(self.lmdi_2012_2013.est()))
self._write_column(sheet, 8, ['2014']+list(self.lmdi_2013_2014.est()))
def _write_lmid_yoe(self, sheet):
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007']+list(self.lmdi_2006_2007.yoe()))
self._write_column(sheet, 2, ['2008']+list(self.lmdi_2007_2008.yoe()))
self._write_column(sheet, 3, ['2009']+list(self.lmdi_2008_2009.yoe()))
self._write_column(sheet, 4, ['2010']+list(self.lmdi_2009_2010.yoe()))
self._write_column(sheet, 5, ['2011']+list(self.lmdi_2010_2011.yoe()))
self._write_column(sheet, 6, ['2012']+list(self.lmdi_2011_2012.yoe()))
self._write_column(sheet, 7, ['2013']+list(self.lmdi_2012_2013.yoe()))
self._write_column(sheet, 8, ['2014']+list(self.lmdi_2013_2014.yoe()))
def _write_lmid_yct(self, sheet):
self._write_column(sheet, 0, ['Province']+self.province_names)
self._write_column(sheet, 1, ['2007']+list(self.lmdi_2006_2007.yct()))
self._write_column(sheet, 2, ['2008']+list(self.lmdi_2007_2008.yct()))
self._write_column(sheet, 3, ['2009']+list(self.lmdi_2008_2009.yct()))
self._write_column(sheet, 4, ['2010']+list(self.lmdi_2009_2010.yct()))
self._write_column(sheet, 5, ['2011']+list(self.lmdi_2010_2011.yct()))
self._write_column(sheet, 6, ['2012']+list(self.lmdi_2011_2012.yct()))
self._write_column(sheet, 7, ['2013']+list(self.lmdi_2012_2013.yct()))
self._write_column(sheet, 8, ['2014']+list(self.lmdi_2013_2014.yct()))
def _write_column(self, sheet, column, values):
'''
write a column
'''
row = 0
for value in values:
sheet.write(row, column, value)
row += 1
def write_ci_province(self, xls_file_name):
workbook = Workbook(encoding='utf8')
self._write_emx(workbook.add_sheet('emx'))
self._write_pei(workbook.add_sheet('pei'))
self._write_cef(workbook.add_sheet('cef'))
self._write_eue(workbook.add_sheet('eue'))
self._write_est(workbook.add_sheet('est'))
workbook.save(xls_file_name)
def _write_emx(self, sheet):
self._write_column(sheet, 0, ['Province'] + self.province_names)
self._write_column(sheet, 1, ['2007'] + self.spaam_2006_2007.emx_province)
self._write_column(sheet, 2, ['2008'] + self.spaam_2007_2008.emx_province)
self._write_column(sheet, 3, ['2009'] + self.spaam_2008_2009.emx_province)
self._write_column(sheet, 4, ['2010'] + self.spaam_2009_2010.emx_province)
self._write_column(sheet, 5, ['2011'] + self.spaam_2010_2011.emx_province)
self._write_column(sheet, 6, ['2012'] + self.spaam_2011_2012.emx_province)
self._write_column(sheet, 7, ['2013'] + self.spaam_2012_2013.emx_province)
self._write_column(sheet, 8, ['2014'] + self.spaam_2013_2014.emx_province)
def _write_pei(self, sheet):
self._write_column(sheet, 0, ['Province'] + self.province_names)
self._write_column(sheet, 1, ['2007'] + self.spaam_2006_2007.pei_province)
self._write_column(sheet, 2, ['2008'] + self.spaam_2007_2008.pei_province)
self._write_column(sheet, 3, ['2009'] + self.spaam_2008_2009.pei_province)
self._write_column(sheet, 4, ['2010'] + self.spaam_2009_2010.pei_province)
self._write_column(sheet, 5, ['2011'] + self.spaam_2010_2011.pei_province)
self._write_column(sheet, 6, ['2012'] + self.spaam_2011_2012.pei_province)
self._write_column(sheet, 7, ['2013'] + self.spaam_2012_2013.pei_province)
self._write_column(sheet, 8, ['2014'] + self.spaam_2013_2014.pei_province)
def _write_cef(self, sheet):
self._write_column(sheet, 0, ['Province'] + self.province_names)
self._write_column(sheet, 1, ['2007'] + self.spaam_2006_2007.cef_province)
self._write_column(sheet, 2, ['2008'] + self.spaam_2007_2008.cef_province)
self._write_column(sheet, 3, ['2009'] + self.spaam_2008_2009.cef_province)
self._write_column(sheet, 4, ['2010'] + self.spaam_2009_2010.cef_province)
self._write_column(sheet, 5, ['2011'] + self.spaam_2010_2011.cef_province)
self._write_column(sheet, 6, ['2012'] + self.spaam_2011_2012.cef_province)
self._write_column(sheet, 7, ['2013'] + self.spaam_2012_2013.cef_province)
self._write_column(sheet, 8, ['2014'] + self.spaam_2013_2014.cef_province)
def _write_eue(self, sheet):
self._write_column(sheet, 0, ['Province'] + self.province_names)
self._write_column(sheet, 1, ['2007'] + self.spaam_2006_2007.eue_province)
self._write_column(sheet, 2, ['2008'] + self.spaam_2007_2008.eue_province)
self._write_column(sheet, 3, ['2009'] + self.spaam_2008_2009.eue_province)
self._write_column(sheet, 4, ['2010'] + self.spaam_2009_2010.eue_province)
self._write_column(sheet, 5, ['2011'] + self.spaam_2010_2011.eue_province)
self._write_column(sheet, 6, ['2012'] + self.spaam_2011_2012.eue_province)
self._write_column(sheet, 7, ['2013'] + self.spaam_2012_2013.eue_province)
self._write_column(sheet, 8, ['2014'] + self.spaam_2013_2014.eue_province)
def _write_est(self, sheet):
self._write_column(sheet, 0, ['Province'] + self.province_names)
self._write_column(sheet, 1, ['2007'] + self.spaam_2006_2007.est_province)
self._write_column(sheet, 2, ['2008'] + self.spaam_2007_2008.est_province)
self._write_column(sheet, 3, ['2009'] + self.spaam_2008_2009.est_province)
self._write_column(sheet, 4, ['2010'] + self.spaam_2009_2010.est_province)
self._write_column(sheet, 5, ['2011'] + self.spaam_2010_2011.est_province)
self._write_column(sheet, 6, ['2012'] + self.spaam_2011_2012.est_province)
self._write_column(sheet, 7, ['2013'] + self.spaam_2012_2013.est_province)
self._write_column(sheet, 8, ['2014'] + self.spaam_2013_2014.est_province)
if __name__ == '__main__':
app = AppLmdi()
'''
workbook = Workbook(encoding='utf8')
app.write_lmdi_single(workbook.add_sheet('单期LMDI'))
app.write_lmdi_multi(workbook.add_sheet('跨期LMDI'))
workbook.save('output/LMDI单期和跨期.xls')
app.write_multi_lmdi('output/省份lmdi明细.xls')
app.write_single_attribution('output/单期归因.xls')
app.write_multi_attribution('output/跨期归因.xls')
'''
app.write_ci_province('output/省份ldmi结果.xls')
| 64.362482 | 119 | 0.5537 | 5,544 | 45,633 | 4.233586 | 0.022547 | 0.130374 | 0.184057 | 0.245409 | 0.878574 | 0.843211 | 0.808018 | 0.749521 | 0.729752 | 0.722807 | 0 | 0.160987 | 0.321566 | 45,633 | 708 | 120 | 64.45339 | 0.597145 | 0.010935 | 0 | 0.312804 | 0 | 0 | 0.040055 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.069692 | false | 0 | 0.02269 | 0 | 0.094003 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
9ae6514b734d11867c5834b669677b7d2bb82344 | 20,671 | py | Python | LSTM_Text_Generation/ConvModel/ConvOtherStructureModel.py | naykun/MusicResearch | 97bd64f23710c9f45634da0fd4674172746cfaf5 | [
"Apache-2.0"
] | 5 | 2018-07-11T02:39:40.000Z | 2020-09-07T19:26:46.000Z | LSTM_Text_Generation/ConvModel/ConvOtherStructureModel.py | naykun/MusicResearch | 97bd64f23710c9f45634da0fd4674172746cfaf5 | [
"Apache-2.0"
] | null | null | null | LSTM_Text_Generation/ConvModel/ConvOtherStructureModel.py | naykun/MusicResearch | 97bd64f23710c9f45634da0fd4674172746cfaf5 | [
"Apache-2.0"
] | 2 | 2018-07-11T13:36:19.000Z | 2018-09-01T03:42:04.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import warnings
from keras.models import Model
from keras import layers
from keras.layers import Activation
from keras.layers import Dense
from keras.layers import Input
from keras.layers import BatchNormalization
from keras.layers import Conv1D
from keras.layers import MaxPooling1D
from keras.layers import AveragePooling1D
from keras.layers import GlobalAveragePooling1D, LocallyConnected1D, Lambda
from keras.layers import GlobalMaxPooling1D
from keras.engine.topology import get_source_inputs
from keras.utils.data_utils import get_file
from keras import backend as K
def conv1d_bn(x,
filters,
num_row,
num_col,
padding='same',
strides=(1, 1),
name=None):
"""Utility function to apply conv + BN.
# Arguments
x: input tensor.
filters: filters in `Conv1D`.
        num_row: length of the 1D convolution kernel.
        num_col: unused; kept so the signature mirrors the 2D `conv2d_bn`.
        padding: padding mode in `Conv1D`.
        strides: strides as a 2-tuple; only the first element is passed
            to `Conv1D`.
name: name of the ops; will become `name + '_conv'`
for the convolution and `name + '_bn'` for the
batch norm layer.
# Returns
Output tensor after applying `Conv1D` and `BatchNormalization`.
"""
if name is not None:
bn_name = name + '_bn'
conv_name = name + '_conv'
else:
bn_name = None
conv_name = None
    # Conv1D tensors are (batch, steps, channels) under 'channels_last', so
    # batch norm must run over axis 2; the 2D value of 3 does not exist here.
    if K.image_data_format() == 'channels_first':
        bn_axis = 1
    else:
        bn_axis = 2
x = Conv1D(
filters, (num_row),
strides=strides[0],
padding=padding,
use_bias=False,
name=conv_name)(x)
x = BatchNormalization(axis=bn_axis, scale=False, name=bn_name)(x)
x = Activation('relu', name=name)(x)
return x
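# A minimal sketch of the block (shapes are illustrative):
#
#     inp = Input(shape=(100, 16))                     # (steps, channels)
#     out = conv1d_bn(inp, 32, 3, 3)                   # -> (None, 100, 32)
#     out = conv1d_bn(out, 64, 3, 3, strides=(2, 2))   # -> (None, 50, 64)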
# padding_length should be the next conv's filter size
def same_padding_second_dim(x, padding_length, name):
    # 'same' padding for a kernel of size k needs k - 1 extra steps in total,
    # split as evenly as possible between the two sides.
    total = max(padding_length - 1, 0)
    l, r = total // 2, total - total // 2
    # Apply the padding layer to the tensor; the original code assigned the
    # Lambda layer itself without calling it, so nothing was ever padded.
    x = Lambda(lambda t: K.temporal_padding(t, padding=(l, r)), name=name)(x)
    return x
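# Usage sketch (hypothetical): pad so that a following 'valid' convolution
# with kernel size 5 keeps the number of time steps unchanged:
#
#     x = same_padding_second_dim(x, padding_length=5, name='pad5')
#     x = Conv1D(64, 5, padding='valid')(x)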
def InceptionV3(include_top=True,
input_tensor=None,
input_shape=None,
pooling=None,
classes=1000):
# if input_tensor is None:
# img_input = Input(shape=input_shape)
# else:
# if not K.is_keras_tensor(input_tensor):
# img_input = Input(tensor=input_tensor, shape=input_shape)
# else:
# img_input = input_tensor
img_input = Input(shape=input_shape)
    # For 1D feature maps the channels sit on axis 2 ((batch, steps, channels)
    # under 'channels_last'), so branch outputs are concatenated along it.
    if K.image_data_format() == 'channels_first':
        channel_axis = 1
    else:
        channel_axis = 2
x = conv1d_bn(img_input, 32, 3, 3, strides=(2, 2), padding='valid')
x = conv1d_bn(x, 32, 3, 3, padding='valid')
x = conv1d_bn(x, 64, 3, 3)
x = MaxPooling1D((3), strides=(2))(x)
x = conv1d_bn(x, 80, 1, 1, padding='valid')
x = conv1d_bn(x, 192, 3, 3, padding='valid')
x = MaxPooling1D((3), strides=(2))(x)
# mixed 0, 1, 2: 35 x 35 x 256
branch1x1 = conv1d_bn(x, 64, 1, 1)
branch5x5 = conv1d_bn(x, 48, 1, 1)
branch5x5 = conv1d_bn(branch5x5, 64, 5, 5)
branch3x3dbl = conv1d_bn(x, 64, 1, 1)
branch3x3dbl = conv1d_bn(branch3x3dbl, 96, 3, 3)
branch3x3dbl = conv1d_bn(branch3x3dbl, 96, 3, 3)
branch_pool = AveragePooling1D((3), strides=(1), padding='same')(x)
branch_pool = conv1d_bn(branch_pool, 32, 1, 1)
x = layers.concatenate(
[branch1x1, branch5x5, branch3x3dbl, branch_pool],
axis=channel_axis,
name='mixed0')
# mixed 1: 35 x 35 x 256
branch1x1 = conv1d_bn(x, 64, 1, 1)
branch5x5 = conv1d_bn(x, 48, 1, 1)
branch5x5 = conv1d_bn(branch5x5, 64, 5, 5)
branch3x3dbl = conv1d_bn(x, 64, 1, 1)
branch3x3dbl = conv1d_bn(branch3x3dbl, 96, 3, 3)
branch3x3dbl = conv1d_bn(branch3x3dbl, 96, 3, 3)
branch_pool = AveragePooling1D((3), strides=(1), padding='same')(x)
branch_pool = conv1d_bn(branch_pool, 64, 1, 1)
x = layers.concatenate(
[branch1x1, branch5x5, branch3x3dbl, branch_pool],
axis=channel_axis,
name='mixed1')
# mixed 2: 35 x 35 x 256
branch1x1 = conv1d_bn(x, 64, 1, 1)
branch5x5 = conv1d_bn(x, 48, 1, 1)
branch5x5 = conv1d_bn(branch5x5, 64, 5, 5)
branch3x3dbl = conv1d_bn(x, 64, 1, 1)
branch3x3dbl = conv1d_bn(branch3x3dbl, 96, 3, 3)
branch3x3dbl = conv1d_bn(branch3x3dbl, 96, 3, 3)
branch_pool = AveragePooling1D((3), strides=(1), padding='same')(x)
branch_pool = conv1d_bn(branch_pool, 64, 1, 1)
x = layers.concatenate(
[branch1x1, branch5x5, branch3x3dbl, branch_pool],
axis=channel_axis,
name='mixed2')
# mixed 3: 17 x 17 x 768
branch3x3 = conv1d_bn(x, 384, 3, 3, strides=(2, 2), padding='valid')
branch3x3dbl = conv1d_bn(x, 64, 1, 1)
branch3x3dbl = conv1d_bn(branch3x3dbl, 96, 3, 3)
branch3x3dbl = conv1d_bn(
branch3x3dbl, 96, 3, 3, strides=(2, 2), padding='valid')
branch_pool = MaxPooling1D((3), strides=(2))(x)
x = layers.concatenate(
[branch3x3, branch3x3dbl, branch_pool], axis=channel_axis, name='mixed3')
# mixed 4: 17 x 17 x 768
branch1x1 = conv1d_bn(x, 192, 1, 1)
branch7x7 = conv1d_bn(x, 128, 1, 1)
branch7x7 = conv1d_bn(branch7x7, 128, 1, 7)
branch7x7 = conv1d_bn(branch7x7, 192, 7, 1)
branch7x7dbl = conv1d_bn(x, 128, 1, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 128, 7, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 128, 1, 7)
branch7x7dbl = conv1d_bn(branch7x7dbl, 128, 7, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 192, 1, 7)
branch_pool = AveragePooling1D((3), strides=(1), padding='same')(x)
branch_pool = conv1d_bn(branch_pool, 192, 1, 1)
x = layers.concatenate(
[branch1x1, branch7x7, branch7x7dbl, branch_pool],
axis=channel_axis,
name='mixed4')
# mixed 5, 6: 17 x 17 x 768
for i in range(2):
branch1x1 = conv1d_bn(x, 192, 1, 1)
branch7x7 = conv1d_bn(x, 160, 1, 1)
branch7x7 = conv1d_bn(branch7x7, 160, 1, 7)
branch7x7 = conv1d_bn(branch7x7, 192, 7, 1)
branch7x7dbl = conv1d_bn(x, 160, 1, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 160, 7, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 160, 1, 7)
branch7x7dbl = conv1d_bn(branch7x7dbl, 160, 7, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 192, 1, 7)
branch_pool = AveragePooling1D(
(3), strides=(1), padding='same')(x)
branch_pool = conv1d_bn(branch_pool, 192, 1, 1)
x = layers.concatenate(
[branch1x1, branch7x7, branch7x7dbl, branch_pool],
axis=channel_axis,
name='mixed' + str(5 + i))
# mixed 7: 17 x 17 x 768
branch1x1 = conv1d_bn(x, 192, 1, 1)
branch7x7 = conv1d_bn(x, 192, 1, 1)
branch7x7 = conv1d_bn(branch7x7, 192, 1, 7)
branch7x7 = conv1d_bn(branch7x7, 192, 7, 1)
branch7x7dbl = conv1d_bn(x, 192, 1, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 192, 7, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 192, 1, 7)
branch7x7dbl = conv1d_bn(branch7x7dbl, 192, 7, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 192, 1, 7)
branch_pool = AveragePooling1D((3), strides=(1), padding='same')(x)
branch_pool = conv1d_bn(branch_pool, 192, 1, 1)
x = layers.concatenate(
[branch1x1, branch7x7, branch7x7dbl, branch_pool],
axis=channel_axis,
name='mixed7')
# mixed 8: 8 x 8 x 1280
branch3x3 = conv1d_bn(x, 192, 1, 1)
branch3x3 = conv1d_bn(branch3x3, 320, 3, 3,
strides=(2, 2), padding='valid')
branch7x7x3 = conv1d_bn(x, 192, 1, 1)
branch7x7x3 = conv1d_bn(branch7x7x3, 192, 1, 7)
branch7x7x3 = conv1d_bn(branch7x7x3, 192, 7, 1)
branch7x7x3 = conv1d_bn(
branch7x7x3, 192, 3, 3, strides=(2, 2), padding='valid')
branch_pool = MaxPooling1D((3), strides=(2))(x)
x = layers.concatenate(
[branch3x3, branch7x7x3, branch_pool], axis=channel_axis, name='mixed8')
# mixed 9: 8 x 8 x 2048
for i in range(2):
branch1x1 = conv1d_bn(x, 320, 1, 1)
branch3x3 = conv1d_bn(x, 384, 1, 1)
branch3x3_1 = conv1d_bn(branch3x3, 384, 1, 3)
branch3x3_2 = conv1d_bn(branch3x3, 384, 3, 1)
branch3x3 = layers.concatenate(
[branch3x3_1, branch3x3_2], axis=channel_axis, name='mixed9_' + str(i))
branch3x3dbl = conv1d_bn(x, 448, 1, 1)
branch3x3dbl = conv1d_bn(branch3x3dbl, 384, 3, 3)
branch3x3dbl_1 = conv1d_bn(branch3x3dbl, 384, 1, 3)
branch3x3dbl_2 = conv1d_bn(branch3x3dbl, 384, 3, 1)
branch3x3dbl = layers.concatenate(
[branch3x3dbl_1, branch3x3dbl_2], axis=channel_axis)
branch_pool = AveragePooling1D(
(3), strides=(1), padding='same')(x)
branch_pool = conv1d_bn(branch_pool, 192, 1, 1)
x = layers.concatenate(
[branch1x1, branch3x3, branch3x3dbl, branch_pool],
axis=channel_axis,
name='mixed' + str(9 + i))
if include_top:
# Classification block
x = GlobalAveragePooling1D(name='avg_pool')(x)
x = Dense(classes, activation='softmax', name='predictions')(x)
else:
if pooling == 'avg':
x = GlobalAveragePooling1D()(x)
elif pooling == 'max':
x = GlobalMaxPooling1D()(x)
# Ensure that the model takes into account
# any potential predecessors of `input_tensor`.
if input_tensor is not None:
inputs = get_source_inputs(input_tensor)
else:
inputs = img_input
# Create model.
model = Model(inputs, x, name='inception_v3')
return model
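
# Hedged usage sketch (not from the original source; the input length and
# class count are assumptions):
#
#   model = InceptionV3(include_top=True, input_shape=(1024, 1), classes=10)
#   model.compile(optimizer='adam', loss='categorical_crossentropy')
#   model.summary()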
melody_feature_length = 100
s_filter_num, s_filter_size = 8, 8
m_filter_num, m_filter_size = 16, 16
l_filter_num, l_filter_size = 32, 32
xl_filter_num, xl_filter_size = 64, 64
xxl_filter_num, xxl_filter_size = 128, 128
default_activation = 'relu'
def InceptionV3_Local(include_top=True,
input_tensor=None,
input_shape=None,
pooling=None,
classes=1000):
def conv1d_bn_local(x,
filters,
num_row,
num_col,
padding='valid',
strides=(1, 1),
name=None):
        # The strides argument is ignored; the locally connected variant
        # always runs at stride 1.
        strides = (1, 1)
if name is not None:
bn_name = name + '_bn'
conv_name = name + '_conv'
else:
bn_name = None
conv_name = None
        # bn_axis is fixed at 1, as in conv1d_bn above (per-timestep
        # normalization rather than per-channel).
        bn_axis = 1
        # Pad the temporal dimension so the 'valid' locally connected layer
        # below preserves the sequence length.
        x = same_padding_second_dim(x, padding_length=num_row, name=x.name.split('/')[0])(x)
        x = LocallyConnected1D(
            filters,
            num_row,
            strides=strides[0],
            padding=padding,
            use_bias=False,
            name=conv_name)(x)
x = BatchNormalization(axis=bn_axis, scale=False, name=bn_name)(x)
x = Activation('relu', name=name)(x)
return x
img_input = Input(shape=input_shape)
channel_axis = 2
x = conv1d_bn_local(img_input, 32, m_filter_size, 3, strides=(2, 2), padding='valid')
x = conv1d_bn_local(x, 32, m_filter_size, 3, padding='valid')
x = conv1d_bn_local(x, 64, m_filter_size, 3)
x = MaxPooling1D((3), strides=(2))(x)
x = conv1d_bn(x, 80, 1, 1, padding='valid')
x = conv1d_bn(x, 192, m_filter_size, 3, padding='valid')
x = MaxPooling1D((3), strides=(2))(x)
# mixed 0, 1, 2: 35 x 35 x 256
branch1x1 = conv1d_bn(x, 64, m_filter_size, 1)
branch5x5 = conv1d_bn(x, 48, m_filter_size, 1)
branch5x5 = conv1d_bn(branch5x5, 64, m_filter_size, 5)
branch3x3dbl = conv1d_bn(x, 64, m_filter_size, 1)
branch3x3dbl = conv1d_bn(branch3x3dbl, 96, m_filter_size, 3)
branch3x3dbl = conv1d_bn(branch3x3dbl, 96, m_filter_size, 3)
branch_pool = AveragePooling1D((3), strides=(1), padding='same')(x)
branch_pool = conv1d_bn(branch_pool, 32, m_filter_size, 1)
x = layers.concatenate(
[branch1x1, branch5x5, branch3x3dbl, branch_pool],
axis=channel_axis,
name='mixed0')
# mixed 1: 35 x 35 x 256
branch1x1 = conv1d_bn(x, 64, m_filter_size, 1)
branch5x5 = conv1d_bn(x, 48, m_filter_size, 1)
branch5x5 = conv1d_bn(branch5x5, 64, m_filter_size, 5)
branch3x3dbl = conv1d_bn(x, 64, m_filter_size, 1)
branch3x3dbl = conv1d_bn(branch3x3dbl, 96, m_filter_size, 3)
branch3x3dbl = conv1d_bn(branch3x3dbl, 96, m_filter_size, 3)
branch_pool = AveragePooling1D((3), strides=(1), padding='same')(x)
branch_pool = conv1d_bn(branch_pool, 64, m_filter_size, 1)
x = layers.concatenate(
[branch1x1, branch5x5, branch3x3dbl, branch_pool],
axis=channel_axis,
name='mixed1')
# mixed 2: 35 x 35 x 256
branch1x1 = conv1d_bn(x, 64, m_filter_size, 1)
branch5x5 = conv1d_bn(x, 48, m_filter_size, 1)
branch5x5 = conv1d_bn(branch5x5, 64, m_filter_size, 5)
branch3x3dbl = conv1d_bn(x, 64, m_filter_size, 1)
branch3x3dbl = conv1d_bn(branch3x3dbl, 96, m_filter_size, 3)
branch3x3dbl = conv1d_bn(branch3x3dbl, 96, m_filter_size, 3)
branch_pool = AveragePooling1D((3), strides=(1), padding='same')(x)
branch_pool = conv1d_bn(branch_pool, 64, s_filter_size, 1)
x = layers.concatenate(
[branch1x1, branch5x5, branch3x3dbl, branch_pool],
axis=channel_axis,
name='mixed2')
# mixed 3: 17 x 17 x 768
branch3x3 = conv1d_bn(x, 384, s_filter_size, 3, strides=(2, 2), padding='valid')
branch3x3dbl = conv1d_bn(x, 64, s_filter_size, 1)
branch3x3dbl = conv1d_bn(branch3x3dbl, 96, m_filter_size, 3)
branch3x3dbl = conv1d_bn(
branch3x3dbl, 96, m_filter_size, 3, strides=(2, 2), padding='valid')
branch_pool = MaxPooling1D((3), strides=(2))(x)
x = layers.concatenate(
[branch3x3, branch3x3dbl, branch_pool], axis=channel_axis, name='mixed3')
# mixed 4: 17 x 17 x 768
branch1x1 = conv1d_bn(x, 192, s_filter_size, 1)
branch7x7 = conv1d_bn(x, 128, s_filter_size, 1)
branch7x7 = conv1d_bn(branch7x7, 128, s_filter_size, 7)
branch7x7 = conv1d_bn(branch7x7, 192, m_filter_size, 1)
branch7x7dbl = conv1d_bn(x, 128, s_filter_size, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 128, 7, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 128, 1, 7)
branch7x7dbl = conv1d_bn(branch7x7dbl, 128, 7, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 192, 1, 7)
branch_pool = AveragePooling1D((3), strides=(1), padding='same')(x)
branch_pool = conv1d_bn(branch_pool, 192, 1, 1)
x = layers.concatenate(
[branch1x1, branch7x7, branch7x7dbl, branch_pool],
axis=channel_axis,
name='mixed4')
# mixed 5, 6: 17 x 17 x 768
for i in range(2):
branch1x1 = conv1d_bn(x, 192, 1, 1)
branch7x7 = conv1d_bn(x, 160, 1, 1)
branch7x7 = conv1d_bn(branch7x7, 160, 1, 7)
branch7x7 = conv1d_bn(branch7x7, 192, 7, 1)
branch7x7dbl = conv1d_bn(x, 160, 1, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 160, 7, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 160, 1, 7)
branch7x7dbl = conv1d_bn(branch7x7dbl, 160, 7, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 192, 1, 7)
branch_pool = AveragePooling1D(
(3), strides=(1), padding='same')(x)
branch_pool = conv1d_bn(branch_pool, 192, 1, 1)
x = layers.concatenate(
[branch1x1, branch7x7, branch7x7dbl, branch_pool],
axis=channel_axis,
name='mixed' + str(5 + i))
# mixed 7: 17 x 17 x 768
branch1x1 = conv1d_bn(x, 192, 1, 1)
branch7x7 = conv1d_bn(x, 192, 1, 1)
branch7x7 = conv1d_bn(branch7x7, 192, 1, 7)
branch7x7 = conv1d_bn(branch7x7, 192, 7, 1)
branch7x7dbl = conv1d_bn(x, 192, 1, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 192, 7, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 192, 1, 7)
branch7x7dbl = conv1d_bn(branch7x7dbl, 192, 7, 1)
branch7x7dbl = conv1d_bn(branch7x7dbl, 192, 1, 7)
branch_pool = AveragePooling1D((3), strides=(1), padding='same')(x)
branch_pool = conv1d_bn(branch_pool, 192, 1, 1)
x = layers.concatenate(
[branch1x1, branch7x7, branch7x7dbl, branch_pool],
axis=channel_axis,
name='mixed7')
# mixed 8: 8 x 8 x 1280
branch3x3 = conv1d_bn(x, 192, 1, 1)
branch3x3 = conv1d_bn(branch3x3, 320, 3, 3,
strides=(2, 2), padding='valid')
branch7x7x3 = conv1d_bn(x, 192, 1, 1)
branch7x7x3 = conv1d_bn(branch7x7x3, 192, 1, 7)
branch7x7x3 = conv1d_bn(branch7x7x3, 192, 7, 1)
branch7x7x3 = conv1d_bn(
branch7x7x3, 192, 3, 3, strides=(2, 2), padding='valid')
branch_pool = MaxPooling1D((3), strides=(2))(x)
x = layers.concatenate(
[branch3x3, branch7x7x3, branch_pool], axis=channel_axis, name='mixed8')
# mixed 9: 8 x 8 x 2048
    # Only one standard mixed-9 block is built in this loop; a second block
    # follows below, written out by hand.
    for i in range(1):
branch1x1 = conv1d_bn(x, 320, 1, 1)
branch3x3 = conv1d_bn(x, 384, 1, 1)
branch3x3_1 = conv1d_bn(branch3x3, 384, 1, 3)
branch3x3_2 = conv1d_bn(branch3x3, 384, 3, 1)
branch3x3 = layers.concatenate(
[branch3x3_1, branch3x3_2], axis=channel_axis, name='mixed9_' + str(i))
branch3x3dbl = conv1d_bn(x, 448, 1, 1)
branch3x3dbl = conv1d_bn(branch3x3dbl, 384, 3, 3)
branch3x3dbl_1 = conv1d_bn(branch3x3dbl, 384, 1, 3)
branch3x3dbl_2 = conv1d_bn(branch3x3dbl, 384, 3, 1)
branch3x3dbl = layers.concatenate(
[branch3x3dbl_1, branch3x3dbl_2], axis=channel_axis)
branch_pool = AveragePooling1D(
(3), strides=(1), padding='same')(x)
branch_pool = conv1d_bn(branch_pool, 192, 1, 1)
x = layers.concatenate(
[branch1x1, branch3x3, branch3x3dbl, branch_pool],
axis=channel_axis,
name='mixed' + str(9 + i))
    # NOTE: this rebinding makes conv1d_bn_local an alias of conv1d_bn, so
    # the hand-written block below uses plain convolutions rather than
    # locally connected layers.
    i = 1
    conv1d_bn_local = conv1d_bn
branch1x1 = conv1d_bn_local(x, 320, m_filter_num, 1)
branch3x3 = conv1d_bn_local(x, 384, m_filter_num, 1)
branch3x3_1 = conv1d_bn_local(branch3x3, 384, m_filter_num, 3)
branch3x3_2 = conv1d_bn_local(branch3x3, 384, m_filter_num, 1)
# channel_axis = 1
branch3x3 = layers.concatenate(
[branch3x3_1, branch3x3_2], axis=channel_axis, name='mixed9_' + str(i))
branch3x3dbl = conv1d_bn_local(x, 448, m_filter_num, 1)
branch3x3dbl = conv1d_bn_local(branch3x3dbl, 384, m_filter_num, 3)
branch3x3dbl_1 = conv1d_bn_local(branch3x3dbl, 384, m_filter_num, 3)
branch3x3dbl_2 = conv1d_bn_local(branch3x3dbl, 384, m_filter_num, 1)
branch3x3dbl = layers.concatenate(
[branch3x3dbl_1, branch3x3dbl_2], axis=channel_axis)
branch_pool = AveragePooling1D(
(3), strides=(1), padding='same')(x)
branch_pool = conv1d_bn_local(branch_pool, 192, 2, 1)
x = layers.concatenate(
[branch1x1, branch3x3, branch3x3dbl, branch_pool],
axis=channel_axis,
name='mixed' + str(9 + i))
if include_top:
# Classification block
x = GlobalAveragePooling1D(name='avg_pool')(x)
x = Dense(classes, activation='softmax', name='predictions')(x)
else:
if pooling == 'avg':
x = GlobalAveragePooling1D()(x)
elif pooling == 'max':
x = GlobalMaxPooling1D()(x)
# Ensure that the model takes into account
# any potential predecessors of `input_tensor`.
if input_tensor is not None:
inputs = get_source_inputs(input_tensor)
else:
inputs = img_input
# Create model.
model = Model(inputs, x, name='inception_v3')
    return model
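
# Hedged usage sketch for the locally connected variant (the input length
# and class count are assumptions, not from the original source):
#
#   model = InceptionV3_Local(include_top=True,
#                             input_shape=(1024, 1),
#                             classes=10)
#   model.summary()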

# === Media/common/Scripts/TestCompositor.py (dmacka/MultiverseClientServer, MIT license) ===

import ClientAPI
CurrentCompositor = ''
class TestCompositorEffect:
def __init__(self, oid):
self.OID = oid
def CancelEffect(self):
pass
def UpdateEffect(self):
pass
def ExecuteEffect(self):
global CurrentCompositor
waitTime = 3000
CurrentCompositor = 'Embossed'
compositor = ClientAPI.Compositor.Compositor(CurrentCompositor)
compositor.Enabled = True
ClientAPI.Write('Activating compositor: ' + CurrentCompositor)
yield waitTime
compositor.Dispose()
CurrentCompositor = 'Invert'
compositor = ClientAPI.Compositor.Compositor(CurrentCompositor)
compositor.Enabled = True
ClientAPI.Write('Activating compositor: ' + CurrentCompositor)
yield waitTime
compositor.Dispose()
CurrentCompositor = 'Laplace'
compositor = ClientAPI.Compositor.Compositor(CurrentCompositor)
compositor.Enabled = True
ClientAPI.Write('Activating compositor: ' + CurrentCompositor)
yield waitTime
compositor.Dispose()
CurrentCompositor = 'Posterize'
compositor = ClientAPI.Compositor.Compositor(CurrentCompositor)
compositor.Enabled = True
ClientAPI.Write('Activating compositor: ' + CurrentCompositor)
yield waitTime
compositor.Dispose()
CurrentCompositor = 'Old Movie'
compositor = ClientAPI.Compositor.Compositor(CurrentCompositor)
compositor.Enabled = True
ClientAPI.Write('Activating compositor: ' + CurrentCompositor)
yield waitTime
compositor.Dispose()
CurrentCompositor = 'Old TV'
compositor = ClientAPI.Compositor.Compositor(CurrentCompositor)
compositor.Enabled = True
ClientAPI.Write('Activating compositor: ' + CurrentCompositor)
yield waitTime
compositor.Dispose()
CurrentCompositor = 'Bloom'
compositor = ClientAPI.Compositor.Compositor(CurrentCompositor)
compositor.Enabled = True
ClientAPI.Write('Activating compositor: ' + CurrentCompositor)
yield waitTime
compositor.Dispose()
CurrentCompositor = 'BlackAndWhite'
compositor = ClientAPI.Compositor.Compositor(CurrentCompositor)
compositor.Enabled = True
ClientAPI.Write('Activating compositor: ' + CurrentCompositor)
yield waitTime
compositor.Dispose()
CurrentCompositor = 'Glass'
compositor = ClientAPI.Compositor.Compositor(CurrentCompositor)
compositor.Enabled = True
ClientAPI.Write('Activating compositor: ' + CurrentCompositor)
yield waitTime
compositor.Dispose()
CurrentCompositor = 'Sharpen Edges'
compositor = ClientAPI.Compositor.Compositor(CurrentCompositor)
compositor.Enabled = True
ClientAPI.Write('Activating compositor: ' + CurrentCompositor)
yield waitTime
compositor.Dispose()
CurrentCompositor = 'Tiling'
compositor = ClientAPI.Compositor.Compositor(CurrentCompositor)
compositor.Enabled = True
ClientAPI.Write('Activating compositor: ' + CurrentCompositor)
yield waitTime
compositor.Dispose()
CurrentCompositor = 'Gaussian Blur'
compositor = ClientAPI.Compositor.Compositor(CurrentCompositor)
# Gaussian Blur needs a special listener
ClientAPI.Compositor.SetupGaussianListener(compositor)
compositor.Enabled = True
ClientAPI.Write('Activating compositor: ' + CurrentCompositor)
yield waitTime
compositor.Dispose()
CurrentCompositor = 'Heat Vision'
compositor = ClientAPI.Compositor.Compositor(CurrentCompositor)
        # Heat Vision needs a special listener
compositor.AddListener(ClientAPI.Compositor.HeatVisionListener())
compositor.Enabled = True
ClientAPI.Write('Activating compositor: ' + CurrentCompositor)
yield waitTime
compositor.Dispose()
CurrentCompositor = 'Motion Blur'
compositor = ClientAPI.Compositor.Compositor(CurrentCompositor)
compositor.Enabled = True
ClientAPI.Write('Activating compositor: ' + CurrentCompositor)
yield waitTime
compositor.Dispose()
CurrentCompositor = 'HDR'
compositor = ClientAPI.Compositor.Compositor(CurrentCompositor)
# HDR needs a special listener
ClientAPI.Compositor.SetupHDRListener(compositor)
compositor.Enabled = True
ClientAPI.Write('Activating compositor: ' + CurrentCompositor)
yield waitTime
compositor.Dispose()
ClientAPI.Write('Compositor test finished.')
# register the effect
ClientAPI.World.RegisterEffect("TestCompositorEffect", TestCompositorEffect)
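
# A hedged refactoring sketch (not part of the original script): the repeated
# enable/wait/dispose stanzas above could be table-driven, with the few
# compositors that need extra setup handled by optional callbacks. Only names
# already used in this file are assumed.
#
#   SIMPLE = ['Embossed', 'Invert', 'Laplace', 'Posterize', 'Old Movie',
#             'Old TV', 'Bloom', 'BlackAndWhite', 'Glass', 'Sharpen Edges',
#             'Tiling', 'Motion Blur']
#   def ExecuteEffect(self):
#       for name in SIMPLE:
#           compositor = ClientAPI.Compositor.Compositor(name)
#           compositor.Enabled = True
#           ClientAPI.Write('Activating compositor: ' + name)
#           yield 3000
#           compositor.Dispose()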

# === Bot/Cogs/mangadex.py (No767/Rin, Apache-2.0 license) ===

import asyncio
import os
import aiohttp
import discord
import orjson
import uvloop
from discord.ext import commands
from dotenv import load_dotenv
from reactionmenu import ReactionMenu
load_dotenv()
MangaDex_API_Key = os.getenv("MangaDex_Access_Token")
class MangaDexV1(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name="mangadex-search", aliases=["md-search"])
async def manga(self, ctx, *, manga: str):
async with aiohttp.ClientSession(json_serialize=orjson.dumps) as session:
try:
params = {
"title": manga,
"publicationDemographic[]": "none",
"contentRating[]": "safe",
"order[title]": "asc",
}
                async with session.get(
                    "https://api.mangadex.org/manga/", params=params
                ) as r:
data = await r.json()
id = data["data"][0]["id"]
async with session.get(
f'https://api.mangadex.org/manga/{id}?includes["cover_art"]&contentRating["safe"]&order[title]=asc'
) as resp:
md_data = await resp.json()
cover_art_id = md_data["data"]["relationships"][2]["id"]
async with session.get(
f"https://api.mangadex.org/cover/{cover_art_id}"
) as rp:
cover_art_data = await rp.json()
cover_art = cover_art_data["data"]["attributes"]["fileName"]
if "en" in data["data"][0]["attributes"]["title"]:
embedVar = discord.Embed()
embedVar.title = md_data["data"]["attributes"]["title"][
"en"
]
embedVar.description = (
str(
md_data["data"]["attributes"]["description"][
"en"
]
)
.replace("\n", "")
.replace("\r", "")
.replace("'", "")
)
embedVar.add_field(
name="Alt Titles",
value=str(
[
title["en"]
for title in md_data["data"]["attributes"][
"altTitles"
]
]
).replace("'", ""),
inline=True,
)
embedVar.add_field(
name="Publication Demographics",
value=md_data["data"]["attributes"][
"publicationDemographic"
],
inline=True,
)
embedVar.add_field(
name="Status",
value=md_data["data"]["attributes"]["status"],
inline=True,
)
embedVar.add_field(
name="Last Volume",
value=md_data["data"]["attributes"]["lastVolume"],
inline=True,
)
embedVar.add_field(
name="Last Chapter",
value=md_data["data"]["attributes"]["lastChapter"],
inline=True,
)
embedVar.add_field(
name="Tags",
value=str(
[
str(item["attributes"]
["name"]["en"])
.replace("\n", "")
.replace("'", "")
for item in md_data["data"]["attributes"][
"tags"
][0:-1]
]
),
inline=True,
)
embedVar.set_image(
url=f"https://uploads.mangadex.org/covers/{id}/{cover_art}"
)
await ctx.send(embed=embedVar)
elif "ja" in data["data"][0]["attributes"]["title"]:
embedVar2 = discord.Embed()
embedVar2.title = md_data["data"]["attributes"][
"title"
]["ja"]
embedVar2.description = (
str(
md_data["data"]["attributes"]["description"][
"ja"
]
)
.replace("\n", "")
.replace("\r", "")
.replace("'", "")
)
embedVar2.add_field(
name="Alt Titles",
value=str(
[
title["en"]
for title in md_data["data"]["attributes"][
"altTitles"
]
]
).replace("'", ""),
inline=True,
)
embedVar2.add_field(
name="Publication Demographics",
value=md_data["data"]["attributes"][
"publicationDemographic"
],
inline=True,
)
embedVar2.add_field(
name="Status",
value=md_data["data"]["attributes"]["status"],
inline=True,
)
embedVar2.add_field(
name="Last Volume",
value=md_data["data"]["attributes"]["lastVolume"],
inline=True,
)
embedVar2.add_field(
name="Last Chapter",
value=md_data["data"]["attributes"]["lastChapter"],
inline=True,
)
embedVar2.add_field(
name="Tags",
value=str(
[
str(item["attributes"]
["name"]["en"])
.replace("\n", "")
.replace("'", "")
for item in md_data["data"]["attributes"][
"tags"
][0:-1]
]
),
inline=True,
)
embedVar2.set_image(
url=f"https://uploads.mangadex.org/covers/{id}/{cover_art}"
)
await ctx.send(embed=embedVar2)
except Exception as e:
embedVar = discord.Embed()
embedVar.description = (
"Sadly this command didn't work. Please try again"
)
embedVar.add_field(name="Reason", value=e, inline=True)
await ctx.send(embed=embedVar)
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
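
    # Example invocation from chat (the prefix depends on the bot's
    # configuration; the title below is a placeholder):
    #   <prefix>md-search Chainsaw Man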
@manga.error
async def on_message_error(
self, ctx: commands.Context, error: commands.CommandError
):
if isinstance(error, commands.MissingRequiredArgument):
embedVar = discord.Embed(color=discord.Color.from_rgb(255, 51, 51))
embedVar.description = f"Missing a required argument: {error.param}"
msg = await ctx.send(embed=embedVar, delete_after=10)
await msg.delete(delay=10)
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
class MangaDexV2(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name="mangadex-random", aliases=["md-random"])
async def manga_random(self, ctx):
async with aiohttp.ClientSession(json_serialize=orjson.dumps) as session:
async with session.get("https://api.mangadex.org/manga/random") as r:
data = await r.json()
id = data["data"]["id"]
cover_art_id = data["data"]["relationships"][2]["id"]
async with session.get(
f"https://api.mangadex.org/cover/{cover_art_id}"
) as rp:
cover_art_data = await rp.json()
cover_art = cover_art_data["data"]["attributes"]["fileName"]
try:
if r.status == 500:
embedError = discord.Embed()
embedError.description = (
"Sorry, but there was an error. Please try again"
)
embedError.add_field(
name="Reason",
value=data["errors"][0]["title"],
inline=True,
)
embedError.add_field(
name="Detail",
value=data["errors"][0]["detail"],
inline=True,
)
await ctx.send(embed=embedError)
else:
embedVar = discord.Embed(
title=data["data"]["attributes"]["title"]["en"]
)
embedVar.description = str(
data["data"]["attributes"]["description"]["en"]
).replace("\n", "")
embedVar.add_field(
name="Alt Titles",
value=str(
[
title["en"]
for title in data["data"]["attributes"][
"altTitles"
]
]
).replace("'", ""),
inline=True,
)
embedVar.add_field(
name="Original Language",
                                value=str(
                                    [data["data"]["attributes"]["originalLanguage"]]
                                ).replace("'", ""),
inline=True,
)
embedVar.add_field(
name="Last Volume",
value=str(
[data["data"]["attributes"]["lastVolume"]]
).replace("'", ""),
inline=True,
)
embedVar.add_field(
name="Last Chapter",
value=str(
[data["data"]["attributes"]["lastChapter"]]
).replace("'", ""),
inline=True,
)
embedVar.add_field(
name="Publication Demographic",
value=data["data"]["attributes"][
"publicationDemographic"
],
inline=True,
)
embedVar.add_field(
name="Status",
value=data["data"]["attributes"]["status"],
inline=True,
)
embedVar.add_field(
name="Content Rating",
value=data["data"]["attributes"]["contentRating"],
inline=True,
)
embedVar.add_field(
name="Tags",
value=str(
[
item["attributes"]["name"]["en"]
for item in data["data"]["attributes"]["tags"][
0:-1
]
]
)
.replace("\n", "")
.replace("'", ""),
inline=True,
)
embedVar.set_image(
url=f"https://uploads.mangadex.org/covers/{id}/{cover_art}"
)
await ctx.send(embed=embedVar)
except Exception as e:
embedVar = discord.Embed()
embedVar.description = (
f"The query could not be performed. Please try again."
)
embedVar.add_field(name="Reason", value=e, inline=True)
await ctx.send(embed=embedVar)
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
@manga_random.error
async def on_message_error(
self, ctx: commands.Context, error: commands.CommandError
):
if isinstance(error, commands.MissingRequiredArgument):
embedVar = discord.Embed(color=discord.Color.from_rgb(255, 51, 51))
embedVar.description = f"Missing a required argument: {error.param}"
msg = await ctx.send(embed=embedVar, delete_after=10)
await msg.delete(delay=10)
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
class MangaDexV3(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name="mangadex-scanlation-search", aliases=["md-ss"])
async def scanlation_search(self, ctx, *, search: str):
async with aiohttp.ClientSession(json_serialize=orjson.dumps) as session:
params = {
"limit": 1,
"name": search,
"order[name]": "asc",
"order[relevance]": "desc",
}
async with session.get(
"https://api.mangadex.org/group", params=params
) as totally_another_response:
md_data2 = await totally_another_response.json()
try:
if md_data2["data"] is None:
embed1 = discord.Embed()
embed1.description = (
"Sorry, but no results were found... Please try again."
)
embed1.add_field(
name="Total", value=md_data2["total"], inline=True
)
embed1.add_field(
name="HTTP Status",
value=totally_another_response.status,
inline=True,
)
await ctx.send(embed=embed1)
else:
embed2 = discord.Embed()
embed2.title = md_data2["data"][0]["attributes"]["name"]
embed2.description = md_data2["data"][0]["attributes"][
"description"
]
embed2.add_field(
name="Alt Names",
value=md_data2["data"][0]["attributes"]["altNames"],
inline=True,
)
embed2.add_field(
name="Website",
value=str(
[md_data2["data"][0]["attributes"]["website"]]
).replace("'", ""),
inline=True,
)
embed2.add_field(
name="IRC Server",
value=md_data2["data"][0]["attributes"]["ircServer"],
inline=True,
)
embed2.add_field(
name="Discord",
value=f"https://discord.gg/{md_data2['data'][0]['attributes']['discord']}",
inline=True,
)
embed2.add_field(
name="Contact Email",
value=str(
[md_data2["data"][0]["attributes"]["contactEmail"]]
).replace("'", ""),
inline=True,
)
embed2.add_field(
name="Twitter",
value=md_data2["data"][0]["attributes"]["twitter"],
inline=True,
)
embed2.add_field(
name="Focused Languages",
value=md_data2["data"][0]["attributes"]["focusedLanguages"],
inline=True,
)
embed2.add_field(
name="Official",
value=md_data2["data"][0]["attributes"]["official"],
inline=True,
)
embed2.add_field(
name="Verified",
value=md_data2["data"][0]["attributes"]["verified"],
inline=True,
)
embed2.add_field(
name="Created At",
value=md_data2["data"][0]["attributes"]["createdAt"],
inline=True,
)
embed2.add_field(
name="Updated At",
value=md_data2["data"][0]["attributes"]["updatedAt"],
inline=True,
)
embed2.add_field(
name="Inactive",
value=md_data2["data"][0]["attributes"]["inactive"],
inline=True,
)
await ctx.send(embed=embed2)
except Exception as e:
embedVar = discord.Embed()
embedVar.description = (
f"The query could not be performed. Please try again."
)
embedVar.add_field(name="Reason", value=e, inline=True)
await ctx.send(embed=embedVar)
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
@scanlation_search.error
async def on_message_error(
self, ctx: commands.Context, error: commands.CommandError
):
if isinstance(error, commands.MissingRequiredArgument):
embedVar = discord.Embed(color=discord.Color.from_rgb(255, 51, 51))
embedVar.description = f"Missing a required argument: {error.param}"
msg = await ctx.send(embed=embedVar, delete_after=10)
await msg.delete(delay=10)
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
class MangaDexV4(commands.Cog):
def __init__(self, bot: commands.Bot):
self.bot = bot
@commands.command(name="mangadex-scanlation-id", aliases=["md-si"])
async def scanlation_id(self, ctx, *, scanlation_id: str):
async with aiohttp.ClientSession(json_serialize=orjson.dumps) as session:
async with session.get(
f"https://api.mangadex.org/group/{scanlation_id}"
) as another_response:
payload = await another_response.json()
try:
if payload["data"] is None:
embed1 = discord.Embed()
embed1.description = (
"Sorry, but no results were found... Please try again."
)
embed1.add_field(
name="Total", value=payload["total"], inline=True
)
embed1.add_field(
name="HTTP Status",
value=another_response.status,
inline=True,
)
await ctx.send(embed=embed1)
else:
embed2 = discord.Embed()
embed2.title = payload["data"]["attributes"]["name"]
embed2.description = payload["data"]["attributes"][
"description"
]
embed2.add_field(
name="Alt Names",
value=payload["data"]["attributes"]["altNames"],
inline=True,
)
embed2.add_field(
name="Website",
value=str(
[payload["data"]["attributes"]["website"]]
).replace("'", ""),
inline=True,
)
embed2.add_field(
name="IRC Server",
value=payload["data"]["attributes"]["ircServer"],
inline=True,
)
embed2.add_field(
name="Discord",
value=f"https://discord.gg/{payload['data']['attributes']['discord']}",
inline=True,
)
embed2.add_field(
name="Contact Email",
value=str(
[payload["data"]["attributes"]["contactEmail"]]
).replace("'", ""),
inline=True,
)
embed2.add_field(
name="Twitter",
value=payload["data"]["attributes"]["twitter"],
inline=True,
)
embed2.add_field(
name="Focused Languages",
value=payload["data"]["attributes"]["focusedLanguages"],
inline=True,
)
embed2.add_field(
name="Official",
value=payload["data"]["attributes"]["official"],
inline=True,
)
embed2.add_field(
name="Verified",
value=payload["data"]["attributes"]["verified"],
inline=True,
)
embed2.add_field(
name="Created At",
value=payload["data"]["attributes"]["createdAt"],
inline=True,
)
embed2.add_field(
name="Updated At",
value=payload["data"]["attributes"]["updatedAt"],
inline=True,
)
embed2.add_field(
name="Inactive",
value=payload["data"]["attributes"]["inactive"],
inline=True,
)
await ctx.send(embed=embed2)
except Exception as e:
embedVar = discord.Embed()
embedVar.description = (
f"The query could not be performed. Please try again."
)
embedVar.add_field(name="Reason", value=e, inline=True)
await ctx.send(embed=embedVar)
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
@scanlation_id.error
async def on_message_error(
self, ctx: commands.Context, error: commands.CommandError
):
if isinstance(error, commands.MissingRequiredArgument):
embedVar = discord.Embed(color=discord.Color.from_rgb(255, 51, 51))
embedVar.description = f"Missing a required argument: {error.param}"
msg = await ctx.send(embed=embedVar, delete_after=10)
await msg.delete(delay=10)
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
class MangaDexV5(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name="mangadex-user", aliases=["md-user"])
async def user(self, ctx, *, user_id: str):
async with aiohttp.ClientSession(json_serialize=orjson.dumps) as session:
async with session.get(f"https://api.mangadex.org/user/{user_id}") as rep:
payload = await rep.json()
try:
embed = discord.Embed()
embed.title = payload["data"]["attributes"]["username"]
embed.add_field(
name="ID", value=payload["data"]["id"], inline=True)
embed.add_field(
name="Type", value=payload["data"]["type"], inline=True
)
embed.add_field(
name="Roles",
value=payload["data"]["attributes"]["roles"],
inline=True,
)
await ctx.send(embed=embed)
except Exception as e:
embedVar = discord.Embed()
embedVar.description = (
f"The query could not be performed. Please try again."
)
embedVar.add_field(name="Reason", value=e, inline=True)
await ctx.send(embed=embedVar)
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
@user.error
async def on_message_error(
self, ctx: commands.Context, error: commands.CommandError
):
if isinstance(error, commands.MissingRequiredArgument):
embedVar = discord.Embed(color=discord.Color.from_rgb(255, 51, 51))
embedVar.description = f"Missing a required argument: {error.param}"
msg = await ctx.send(embed=embedVar, delete_after=10)
await msg.delete(delay=10)
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
class MangaDexV6(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name="mangadex-author", aliases=["md-author"])
async def author(self, ctx, *, author_name: str):
async with aiohttp.ClientSession(json_serialize=orjson.dumps) as session:
params = {"limit": 1, "name": author_name, "order[name]": "asc"}
async with session.get(
"https://api.mangadex.org/author", params=params
) as author_response:
author_payload = await author_response.json()
try:
if author_payload["data"][0]["attributes"]["imageUrl"] is None:
embedVar = discord.Embed()
embedVar.title = author_payload["data"][0]["attributes"]["name"]
embedVar.description = author_payload["data"][0]["attributes"][
"biography"
]
embedVar.add_field(
name="ID",
value=author_payload["data"][0]["id"],
inline=True,
)
embedVar.add_field(
name="Twitter",
value=author_payload["data"][0]["attributes"]["twitter"],
inline=True,
)
embedVar.add_field(
name="Pixiv",
value=author_payload["data"][0]["attributes"]["pixiv"],
inline=True,
)
embedVar.add_field(
name="MelonBook",
value=author_payload["data"][0]["attributes"]["melonBook"],
inline=True,
)
embedVar.add_field(
name="FanBox",
value=author_payload["data"][0]["attributes"]["fanBox"],
inline=True,
)
embedVar.add_field(
name="Booth",
value=author_payload["data"][0]["attributes"]["booth"],
inline=True,
)
embedVar.add_field(
name="NicoVideo",
value=author_payload["data"][0]["attributes"]["nicoVideo"],
inline=True,
)
embedVar.add_field(
name="Skeb",
value=author_payload["data"][0]["attributes"]["skeb"],
inline=True,
)
embedVar.add_field(
name="Fantia",
value=author_payload["data"][0]["id"],
inline=True,
)
embedVar.add_field(
name="Tumblr",
value=author_payload["data"][0]["attributes"]["tumblr"],
inline=True,
)
embedVar.add_field(
name="YouTube",
value=author_payload["data"][0]["attributes"]["youtube"],
inline=True,
)
embedVar.add_field(
name="Website",
value=author_payload["data"][0]["attributes"]["website"],
inline=True,
)
embedVar.add_field(
name="Created At",
value=author_payload["data"][0]["attributes"]["createdAt"],
inline=True,
)
embedVar.add_field(
name="Updated At",
value=author_payload["data"][0]["attributes"]["updatedAt"],
inline=True,
)
await ctx.send(embed=embedVar)
else:
embedVar2 = discord.Embed()
embedVar2.title = author_payload["data"][0]["attributes"][
"name"
]
embedVar2.description = author_payload["data"][0]["attributes"][
"biography"
]
embedVar2.add_field(
name="ID",
value=author_payload["data"][0]["id"],
inline=True,
)
embedVar2.add_field(
name="Twitter",
value=author_payload["data"][0]["attributes"]["twitter"],
inline=True,
)
embedVar2.add_field(
name="Pixiv",
value=author_payload["data"][0]["attributes"]["pixiv"],
inline=True,
)
                        embedVar2.add_field(
                            name="MelonBook",
                            value=author_payload["data"][0]["attributes"]["melonBook"],
                            inline=True,
                        )
                        embedVar2.add_field(
                            name="FanBox",
                            value=author_payload["data"][0]["attributes"]["fanBox"],
                            inline=True,
                        )
                        embedVar2.add_field(
                            name="Booth",
                            value=author_payload["data"][0]["attributes"]["booth"],
                            inline=True,
                        )
                        embedVar2.add_field(
                            name="NicoVideo",
                            value=author_payload["data"][0]["attributes"]["nicoVideo"],
                            inline=True,
                        )
                        embedVar2.add_field(
                            name="Skeb",
                            value=author_payload["data"][0]["attributes"]["skeb"],
                            inline=True,
                        )
embedVar2.add_field(
name="Fantia",
value=author_payload["data"][0]["id"],
inline=True,
)
embedVar2.add_field(
name="Tumblr",
value=author_payload["data"][0]["attributes"]["tumblr"],
inline=True,
)
embedVar2.add_field(
name="YouTube",
value=author_payload["data"][0]["attributes"]["youtube"],
inline=True,
)
embedVar2.add_field(
name="Website",
value=author_payload["data"][0]["attributes"]["website"],
inline=True,
)
embedVar2.add_field(
name="Created At",
value=author_payload["data"][0]["attributes"]["createdAt"],
inline=True,
)
embedVar2.add_field(
name="Updated At",
value=author_payload["data"][0]["attributes"]["updatedAt"],
inline=True,
)
embedVar2.set_image(
url=author_payload["data"][0]["attributes"]["imageUrl"]
)
await ctx.send(embed=embedVar2)
except Exception as e:
embedVar = discord.Embed()
embedVar.description = (
f"The query could not be performed. Please try again."
)
embedVar.add_field(name="Reason", value=e, inline=True)
await ctx.send(embed=embedVar)
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
@author.error
async def on_message_error(
self, ctx: commands.Context, error: commands.CommandError
):
if isinstance(error, commands.MissingRequiredArgument):
embedVar = discord.Embed(color=discord.Color.from_rgb(255, 51, 51))
embedVar.description = f"Missing a required argument: {error.param}"
msg = await ctx.send(embed=embedVar, delete_after=10)
await msg.delete(delay=10)
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
class MangaDexV7(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name="mangadex-author-id", aliases=["md-author-id"])
async def author_id(self, ctx, *, author_id: str):
async with aiohttp.ClientSession(json_serialize=orjson.dumps) as session:
async with session.get(
f"https://api.mangadex.org/author/{author_id}"
) as author_r:
author_data = await author_r.json()
try:
if author_data["data"]["attributes"]["imageUrl"] is None:
embedVar = discord.Embed()
embedVar.title = author_data["data"]["attributes"]["name"]
embedVar.description = author_data["data"]["attributes"][
"biography"
]
embedVar.add_field(
name="ID", value=author_data["data"]["id"], inline=True
)
embedVar.add_field(
name="Twitter",
value=author_data["data"]["attributes"]["twitter"],
inline=True,
)
embedVar.add_field(
name="Pixiv",
value=author_data["data"]["attributes"]["pixiv"],
inline=True,
)
embedVar.add_field(
name="MelonBook",
value=author_data["data"]["attributes"]["melonBook"],
inline=True,
)
embedVar.add_field(
name="FanBox",
value=author_data["data"]["attributes"]["fanBox"],
inline=True,
)
embedVar.add_field(
name="Booth",
value=author_data["data"]["attributes"]["booth"],
inline=True,
)
embedVar.add_field(
name="NicoVideo",
value=author_data["data"]["attributes"]["nicoVideo"],
inline=True,
)
embedVar.add_field(
name="Skeb",
value=author_data["data"]["attributes"]["skeb"],
inline=True,
)
embedVar.add_field(
name="Fantia", value=author_data["data"]["id"], inline=True
)
embedVar.add_field(
name="Tumblr",
value=author_data["data"]["attributes"]["tumblr"],
inline=True,
)
embedVar.add_field(
name="YouTube",
value=author_data["data"]["attributes"]["youtube"],
inline=True,
)
embedVar.add_field(
name="Website",
value=author_data["data"]["attributes"]["website"],
inline=True,
)
embedVar.add_field(
name="Created At",
value=author_data["data"]["attributes"]["createdAt"],
inline=True,
)
embedVar.add_field(
name="Updated At",
value=author_data["data"]["attributes"]["updatedAt"],
inline=True,
)
await ctx.send(embed=embedVar)
else:
embedVar2 = discord.Embed()
embedVar2.title = author_data["data"]["attributes"]["name"]
embedVar2.description = author_data["data"]["attributes"][
"biography"
]
embedVar2.add_field(
name="ID", value=author_data["data"]["id"], inline=True
)
embedVar2.add_field(
name="Twitter",
value=author_data["data"]["attributes"]["twitter"],
inline=True,
)
embedVar2.add_field(
name="Pixiv",
value=author_data["data"]["attributes"]["pixiv"],
inline=True,
)
                    embedVar2.add_field(
                        name="MelonBook",
                        value=author_data["data"]["attributes"]["melonBook"],
                        inline=True,
                    )
                    embedVar2.add_field(
                        name="FanBox",
                        value=author_data["data"]["attributes"]["fanBox"],
                        inline=True,
                    )
                    embedVar2.add_field(
                        name="Booth",
                        value=author_data["data"]["attributes"]["booth"],
                        inline=True,
                    )
                    embedVar2.add_field(
                        name="NicoVideo",
                        value=author_data["data"]["attributes"]["nicoVideo"],
                        inline=True,
                    )
                    embedVar2.add_field(
                        name="Skeb",
                        value=author_data["data"]["attributes"]["skeb"],
                        inline=True,
                    )
embedVar2.add_field(
name="Fantia", value=author_data["data"]["id"], inline=True
)
embedVar2.add_field(
name="Tumblr",
value=author_data["data"]["attributes"]["tumblr"],
inline=True,
)
embedVar2.add_field(
name="YouTube",
value=author_data["data"]["attributes"]["youtube"],
inline=True,
)
embedVar2.add_field(
name="Website",
value=author_data["data"]["attributes"]["website"],
inline=True,
)
embedVar2.add_field(
name="Created At",
value=author_data["data"]["attributes"]["createdAt"],
inline=True,
)
embedVar2.add_field(
name="Updated At",
value=author_data["data"]["attributes"]["updatedAt"],
inline=True,
)
embedVar2.set_image(
url=author_data["data"]["attributes"]["imageUrl"]
)
await ctx.send(embed=embedVar2)
except Exception as e:
embedVar = discord.Embed()
embedVar.description = (
f"The query could not be performed. Please try again."
)
embedVar.add_field(name="Reason", value=e, inline=True)
await ctx.send(embed=embedVar)
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
@author_id.error
async def on_message_error(
self, ctx: commands.Context, error: commands.CommandError
):
if isinstance(error, commands.MissingRequiredArgument):
embedVar = discord.Embed(color=discord.Color.from_rgb(255, 51, 51))
embedVar.description = f"Missing a required argument: {error.param}"
msg = await ctx.send(embed=embedVar, delete_after=10)
await msg.delete(delay=10)
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
class MangaDexReaderV1(commands.Cog):
def __init__(self, bot):
self.bot = bot
    # Later this should accept a manga name as well; for now it relies
    # purely on the chapter id.
@commands.command(name="mangadex-read", aliases=["md-read"])
async def manga_read(self, ctx, *, id: str):
try:
async with aiohttp.ClientSession(json_serialize=orjson.dumps) as session:
async with session.get(f"https://api.mangadex.org/chapter/{id}") as r:
data = await r.json()
chapter_hash = data["data"]["attributes"]["hash"]
                    # All page filenames for this chapter (the response
                    # lists them under data.attributes.data).
                    list_of_images = data["data"]["attributes"]["data"]
chapter_name = data["data"]["attributes"]["title"]
chapter_num = data["data"]["attributes"]["chapter"]
manga_id = data["data"]["relationships"][1]["id"]
async with session.get(
f"https://api.mangadex.org/manga/{manga_id}"
) as resp:
data1 = await resp.json()
title = data1["data"]["attributes"]["title"]["en"]
                        menu = ReactionMenu(
                            ctx,
                            back_button=ReactionMenu.EMOJI_BACK_BUTTON,
                            next_button=ReactionMenu.EMOJI_NEXT_BUTTON,
                            config=ReactionMenu.STATIC,
                            clear_reactions_after=False,
                        )
                        # Build one menu page per chapter image.
                        for image_name in list_of_images:
                            page = discord.Embed(
                                title=title,
                                color=discord.Color.from_rgb(231, 173, 255),
                            )
                            page.description = f"{chapter_name} - Chapter {chapter_num}"
                            page.set_image(
                                url=f"https://uploads.mangadex.org/data/{chapter_hash}/{image_name}"
                            )
                            menu.add_page(page)
                        await menu.start()
except Exception as e:
await ctx.send(e)
def setup(bot):
bot.add_cog(MangaDexV1(bot))
bot.add_cog(MangaDexV2(bot))
bot.add_cog(MangaDexV3(bot))
bot.add_cog(MangaDexV4(bot))
bot.add_cog(MangaDexV5(bot))
bot.add_cog(MangaDexV6(bot))
bot.add_cog(MangaDexV7(bot))
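
# Hedged loading sketch (the dotted extension path is an assumption and
# depends on the project layout):
#
#   bot.load_extension('Cogs.mangadex')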

# === mailchimp_marketing/api/reports_api.py (Moishe/mailchimp-marketing-python, Apache-2.0 license) ===

# coding: utf-8
"""
Mailchimp Marketing API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 3.0.19
Contact: apihelp@mailchimp.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from mailchimp_marketing.api_client import ApiClient
class ReportsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client):
self.api_client = api_client
def get_all_campaign_reports(self, **kwargs): # noqa: E501
"""List campaign reports # noqa: E501
Get campaign reports. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_campaign_reports(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:param str type: The campaign type.
:param datetime before_send_time: Restrict the response to campaigns sent before the set time. We recommend [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) time format: 2015-10-21T15:41:36+00:00.
:param datetime since_send_time: Restrict the response to campaigns sent after the set time. We recommend [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) time format: 2015-10-21T15:41:36+00:00.
:return: CampaignReports1
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_campaign_reports_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_all_campaign_reports_with_http_info(**kwargs) # noqa: E501
return data
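
    # Hedged usage sketch (assumes the standard mailchimp_marketing Client
    # wrapper; the API key and server prefix below are placeholders):
    #
    #   from mailchimp_marketing import Client
    #   client = Client({'api_key': 'YOUR-KEY-us1', 'server': 'us1'})
    #   reports = client.reports.get_all_campaign_reports(count=50)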
def get_all_campaign_reports_with_http_info(self, **kwargs): # noqa: E501
"""List campaign reports # noqa: E501
Get campaign reports. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_campaign_reports_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:param str type: The campaign type.
:param datetime before_send_time: Restrict the response to campaigns sent before the set time. We recommend [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) time format: 2015-10-21T15:41:36+00:00.
:param datetime since_send_time: Restrict the response to campaigns sent after the set time. We recommend [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) time format: 2015-10-21T15:41:36+00:00.
:return: CampaignReports1
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'exclude_fields', 'count', 'offset', 'type', 'before_send_time', 'since_send_time'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_campaign_reports" % key
)
params[key] = val
del params['kwargs']
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'type' in params:
query_params.append(('type', params['type'])) # noqa: E501
if 'before_send_time' in params:
query_params.append(('before_send_time', params['before_send_time'])) # noqa: E501
if 'since_send_time' in params:
query_params.append(('since_send_time', params['since_send_time'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CampaignReports1', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_campaign_report(self, campaign_id, **kwargs): # noqa: E501
"""Get campaign report # noqa: E501
Get report details for a specific sent campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_report(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: CampaignReport
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_campaign_report_with_http_info(campaign_id, **kwargs) # noqa: E501
else:
(data) = self.get_campaign_report_with_http_info(campaign_id, **kwargs) # noqa: E501
return data
def get_campaign_report_with_http_info(self, campaign_id, **kwargs): # noqa: E501
"""Get campaign report # noqa: E501
Get report details for a specific sent campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_report_with_http_info(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: CampaignReport
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_campaign_report" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CampaignReport', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_campaign_abuse_reports(self, campaign_id, **kwargs): # noqa: E501
"""List abuse reports # noqa: E501
Get a list of abuse complaints for a specific campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_abuse_reports(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: AbuseComplaints1
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_campaign_abuse_reports_with_http_info(campaign_id, **kwargs) # noqa: E501
else:
data = self.get_campaign_abuse_reports_with_http_info(campaign_id, **kwargs)  # noqa: E501
return data
def get_campaign_abuse_reports_with_http_info(self, campaign_id, **kwargs): # noqa: E501
"""List abuse reports # noqa: E501
Get a list of abuse complaints for a specific campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_abuse_reports_with_http_info(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: AbuseComplaints1
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_campaign_abuse_reports" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/abuse-reports', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AbuseComplaints1', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
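# Usage sketch: the async_req=True path hands back the request thread, as in
# the docstring above; call .get() to block for the deserialized result.
# Assumes `api` is a configured instance of this class.
#
#   thread = api.get_campaign_abuse_reports('campaign_id', async_req=True)
#   complaints = thread.get()  # AbuseComplaints1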
def get_campaign_abuse_report(self, campaign_id, report_id, **kwargs): # noqa: E501
"""Get abuse report # noqa: E501
Get information about a specific abuse report for a campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_abuse_report(campaign_id, report_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str report_id: The id for the abuse report. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: AbuseComplaint1
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_campaign_abuse_report_with_http_info(campaign_id, report_id, **kwargs) # noqa: E501
else:
data = self.get_campaign_abuse_report_with_http_info(campaign_id, report_id, **kwargs)  # noqa: E501
return data
def get_campaign_abuse_report_with_http_info(self, campaign_id, report_id, **kwargs): # noqa: E501
"""Get abuse report # noqa: E501
Get information about a specific abuse report for a campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_abuse_report_with_http_info(campaign_id, report_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str report_id: The id for the abuse report. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: AbuseComplaint1
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'report_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_campaign_abuse_report" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
# verify the required parameter 'report_id' is set
if ('report_id' not in params or
params['report_id'] is None):
raise ValueError("Missing the required parameter `report_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
if 'report_id' in params:
path_params['report_id'] = params['report_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/abuse-reports/{report_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AbuseComplaint1', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
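# Usage sketch: `exclude_fields` is the complement of `fields` and trims named
# sub-objects from the response; '_links' below is an illustrative field name.
#
#   report = api.get_campaign_abuse_report('campaign_id', 'report_id',
#                                          exclude_fields=['_links'])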
def get_campaign_advice(self, campaign_id, **kwargs): # noqa: E501
"""List campaign feedback # noqa: E501
Get feedback based on a campaign's statistics. Advice feedback is based on campaign stats like opens, clicks, unsubscribes, bounces, and more. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_advice(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: CampaignAdviceReport
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_campaign_advice_with_http_info(campaign_id, **kwargs) # noqa: E501
else:
data = self.get_campaign_advice_with_http_info(campaign_id, **kwargs)  # noqa: E501
return data
def get_campaign_advice_with_http_info(self, campaign_id, **kwargs): # noqa: E501
"""List campaign feedback # noqa: E501
Get feedback based on a campaign's statistics. Advice feedback is based on campaign stats like opens, clicks, unsubscribes, bounces, and more. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_advice_with_http_info(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: CampaignAdviceReport
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_campaign_advice" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/advice', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CampaignAdviceReport', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
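# Usage sketch: argument validation happens before any HTTP traffic. Unknown
# keyword arguments raise TypeError; a missing required parameter raises
# ValueError.
#
#   try:
#       api.get_campaign_advice(None)
#   except ValueError:
#       pass  # campaign_id is required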
def get_campaign_click_details(self, campaign_id, **kwargs): # noqa: E501
"""List campaign details # noqa: E501
Get information about clicks on specific links in your Mailchimp campaigns. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_click_details(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**.
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:return: ClickDetailReport
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_campaign_click_details_with_http_info(campaign_id, **kwargs) # noqa: E501
else:
data = self.get_campaign_click_details_with_http_info(campaign_id, **kwargs)  # noqa: E501
return data
def get_campaign_click_details_with_http_info(self, campaign_id, **kwargs): # noqa: E501
"""List campaign details # noqa: E501
Get information about clicks on specific links in your Mailchimp campaigns. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_click_details_with_http_info(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**.
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:return: ClickDetailReport
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'fields', 'exclude_fields', 'count', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_campaign_click_details" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/click-details', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ClickDetailReport', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
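# Usage sketch: paging with `count`/`offset`. `count` is validated client-side
# against the documented maximum of 1000. Assumes the ClickDetailReport model
# exposes `total_items` as in the Mailchimp schema.
#
#   offset, page_size = 0, 100
#   while True:
#       page = api.get_campaign_click_details('campaign_id',
#                                             count=page_size, offset=offset)
#       if offset + page_size >= page.total_items:
#           break
#       offset += page_size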
def get_campaign_click_details_for_link(self, campaign_id, link_id, **kwargs): # noqa: E501
"""Get campaign link details # noqa: E501
Get click details for a specific link in a campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_click_details_for_link(campaign_id, link_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str link_id: The id for the link. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: ClickDetailReport
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_campaign_click_details_for_link_with_http_info(campaign_id, link_id, **kwargs) # noqa: E501
else:
data = self.get_campaign_click_details_for_link_with_http_info(campaign_id, link_id, **kwargs)  # noqa: E501
return data
def get_campaign_click_details_for_link_with_http_info(self, campaign_id, link_id, **kwargs): # noqa: E501
"""Get campaign link details # noqa: E501
Get click details for a specific link in a campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_click_details_for_link_with_http_info(campaign_id, link_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str link_id: The id for the link. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: ClickDetailReport
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'link_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_campaign_click_details_for_link" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
# verify the required parameter 'link_id' is set
if ('link_id' not in params or
params['link_id'] is None):
raise ValueError("Missing the required parameter `link_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
if 'link_id' in params:
path_params['link_id'] = params['link_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/click-details/{link_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ClickDetailReport', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
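# Usage sketch: calling the *_with_http_info variant directly with
# _return_http_data_only=False returns (data, status, headers) in typical
# swagger-codegen clients, which is useful for inspecting response headers.
#
#   data, status, headers = api.get_campaign_click_details_for_link_with_http_info(
#       'campaign_id', 'link_id', _return_http_data_only=False)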
def get_subscribers_info(self, campaign_id, link_id, **kwargs): # noqa: E501
"""List clicked link subscribers # noqa: E501
Get information about list members who clicked on a specific link in a campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_subscribers_info(campaign_id, link_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str link_id: The id for the link. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**.
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:return: ClickDetailMembers
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_subscribers_info_with_http_info(campaign_id, link_id, **kwargs) # noqa: E501
else:
data = self.get_subscribers_info_with_http_info(campaign_id, link_id, **kwargs)  # noqa: E501
return data
def get_subscribers_info_with_http_info(self, campaign_id, link_id, **kwargs): # noqa: E501
"""List clicked link subscribers # noqa: E501
Get information about list members who clicked on a specific link in a campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_subscribers_info_with_http_info(campaign_id, link_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str link_id: The id for the link. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**.
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:return: ClickDetailMembers
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'link_id', 'fields', 'exclude_fields', 'count', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_subscribers_info" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
# verify the required parameter 'link_id' is set
if ('link_id' not in params or
params['link_id'] is None):
raise ValueError("Missing the required parameter `link_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
if 'link_id' in params:
path_params['link_id'] = params['link_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/click-details/{link_id}/members', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ClickDetailMembers', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
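# Usage sketch: link ids come from the click-details listing above; feed one
# into this endpoint to see who clicked it. Assumes the ClickDetailReport
# schema's `urls_clicked` list, each entry carrying an `id`.
#
#   links = api.get_campaign_click_details('campaign_id')
#   link_id = links.urls_clicked[0].id
#   members = api.get_subscribers_info('campaign_id', link_id, count=50)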
def get_subscriber_info(self, campaign_id, link_id, subscriber_hash, **kwargs): # noqa: E501
"""Get clicked link subscriber # noqa: E501
Get information about a specific subscriber who clicked a link in a specific campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_subscriber_info(campaign_id, link_id, subscriber_hash, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str link_id: The id for the link. (required)
:param str subscriber_hash: The MD5 hash of the lowercase version of the list member's email address. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: ClickDetailMember
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_subscriber_info_with_http_info(campaign_id, link_id, subscriber_hash, **kwargs) # noqa: E501
else:
data = self.get_subscriber_info_with_http_info(campaign_id, link_id, subscriber_hash, **kwargs)  # noqa: E501
return data
def get_subscriber_info_with_http_info(self, campaign_id, link_id, subscriber_hash, **kwargs): # noqa: E501
"""Get clicked link subscriber # noqa: E501
Get information about a specific subscriber who clicked a link in a specific campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_subscriber_info_with_http_info(campaign_id, link_id, subscriber_hash, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str link_id: The id for the link. (required)
:param str subscriber_hash: The MD5 hash of the lowercase version of the list member's email address. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: ClickDetailMember
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'link_id', 'subscriber_hash', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_subscriber_info" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
# verify the required parameter 'link_id' is set
if ('link_id' not in params or
params['link_id'] is None):
raise ValueError("Missing the required parameter `link_id` when calling ``") # noqa: E501
# verify the required parameter 'subscriber_hash' is set
if ('subscriber_hash' not in params or
params['subscriber_hash'] is None):
raise ValueError("Missing the required parameter `subscriber_hash` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
if 'link_id' in params:
path_params['link_id'] = params['link_id'] # noqa: E501
if 'subscriber_hash' in params:
path_params['subscriber_hash'] = params['subscriber_hash'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/click-details/{link_id}/members/{subscriber_hash}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ClickDetailMember', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
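# Usage sketch: subscriber_hash is the MD5 of the lowercased email address,
# per the docstring above.
#
#   import hashlib
#   subscriber_hash = hashlib.md5('User@Example.com'.lower().encode('utf-8')).hexdigest()
#   member = api.get_subscriber_info('campaign_id', 'link_id', subscriber_hash)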
def get_domain_performance_for_campaign(self, campaign_id, **kwargs): # noqa: E501
"""List domain performance stats # noqa: E501
Get statistics for the top-performing email domains in a campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_domain_performance_for_campaign(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: DomainPerformance
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_domain_performance_for_campaign_with_http_info(campaign_id, **kwargs) # noqa: E501
else:
data = self.get_domain_performance_for_campaign_with_http_info(campaign_id, **kwargs)  # noqa: E501
return data
def get_domain_performance_for_campaign_with_http_info(self, campaign_id, **kwargs): # noqa: E501
"""List domain performance stats # noqa: E501
Get statistics for the top-performing email domains in a campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_domain_performance_for_campaign_with_http_info(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: DomainPerformance
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_domain_performance_for_campaign" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/domain-performance', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DomainPerformance', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
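# Usage sketch: a per-call timeout passes straight through to the transport;
# in typical swagger-codegen clients it may be a single total-timeout number
# or a (connect, read) tuple.
#
#   perf = api.get_domain_performance_for_campaign('campaign_id',
#                                                  _request_timeout=(3.05, 27))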
def get_ecommerce_product_activity_for_campaign(self, campaign_id, **kwargs): # noqa: E501
"""List campaign product activity # noqa: E501
Get a breakdown of product activity for a campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_ecommerce_product_activity_for_campaign(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**.
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:param str sort_field: Returns product activity records sorted by the specified field.
:return: InlineResponse2008
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_ecommerce_product_activity_for_campaign_with_http_info(campaign_id, **kwargs) # noqa: E501
else:
data = self.get_ecommerce_product_activity_for_campaign_with_http_info(campaign_id, **kwargs)  # noqa: E501
return data
def get_ecommerce_product_activity_for_campaign_with_http_info(self, campaign_id, **kwargs): # noqa: E501
"""List campaign product activity # noqa: E501
Get a breakdown of product activity for a campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_ecommerce_product_activity_for_campaign_with_http_info(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**.
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:param str sort_field: Returns product activity records sorted by the specified field.
:return: InlineResponse2008
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'fields', 'exclude_fields', 'count', 'offset', 'sort_field'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_ecommerce_product_activity_for_campaign" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'sort_field' in params:
query_params.append(('sort_field', params['sort_field'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/ecommerce-product-activity', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2008', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
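# Usage sketch: `sort_field` is forwarded verbatim as a query parameter; the
# accepted values are defined by the API and not validated here. 'title' is
# an illustrative value.
#
#   activity = api.get_ecommerce_product_activity_for_campaign(
#       'campaign_id', count=25, sort_field='title')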
def get_eepurl_activity_for_campaign(self, campaign_id, **kwargs): # noqa: E501
"""List eep url activity # noqa: E501
Get a summary of social activity for the campaign, tracked by EepURL. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_eepurl_activity_for_campaign(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EepurlActivity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_eepurl_activity_for_campaign_with_http_info(campaign_id, **kwargs) # noqa: E501
else:
data = self.get_eepurl_activity_for_campaign_with_http_info(campaign_id, **kwargs)  # noqa: E501
return data
def get_eepurl_activity_for_campaign_with_http_info(self, campaign_id, **kwargs): # noqa: E501
"""List eep url activity # noqa: E501
Get a summary of social activity for the campaign, tracked by EepURL. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_eepurl_activity_for_campaign_with_http_info(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: EepurlActivity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_eepurl_activity_for_campaign" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/eepurl', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EepurlActivity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
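# Usage sketch: _preload_content=False skips deserialization and, in typical
# swagger-codegen clients, returns the raw urllib3 response instead of an
# EepurlActivity model.
#
#   raw = api.get_eepurl_activity_for_campaign('campaign_id',
#                                              _preload_content=False)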
def get_email_activity_for_campaign(self, campaign_id, **kwargs): # noqa: E501
"""List email activity # noqa: E501
Get a list of members' subscriber activity in a specific campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_email_activity_for_campaign(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**.
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:param str since: Restrict results to email activity events that occur after a specific time. We recommend [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) time format: 2015-10-21T15:41:36+00:00.
:return: EmailActivity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_email_activity_for_campaign_with_http_info(campaign_id, **kwargs) # noqa: E501
else:
data = self.get_email_activity_for_campaign_with_http_info(campaign_id, **kwargs)  # noqa: E501
return data
def get_email_activity_for_campaign_with_http_info(self, campaign_id, **kwargs): # noqa: E501
"""List email activity # noqa: E501
Get a list of members' subscriber activity in a specific campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_email_activity_for_campaign_with_http_info(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**.
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:param str since: Restrict results to email activity events that occur after a specific time. We recommend [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) time format: 2015-10-21T15:41:36+00:00.
:return: EmailActivity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'fields', 'exclude_fields', 'count', 'offset', 'since'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_email_activity_for_campaign" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'since' in params:
query_params.append(('since', params['since'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/email-activity', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EmailActivity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
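# Usage sketch: `since` restricts activity to events after an ISO 8601
# timestamp, matching the format recommended in the docstring above.
#
#   recent = api.get_email_activity_for_campaign(
#       'campaign_id', since='2015-10-21T15:41:36+00:00')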
def get_email_activity_for_subscriber(self, campaign_id, subscriber_hash, **kwargs): # noqa: E501
"""Get subscriber email activity # noqa: E501
Get a specific list member's activity in a campaign including opens, clicks, and bounces. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_email_activity_for_subscriber(campaign_id, subscriber_hash, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str subscriber_hash: The MD5 hash of the lowercase version of the list member's email address. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param str since: Restrict results to email activity events that occur after a specific time. We recommend [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) time format: 2015-10-21T15:41:36+00:00.
:return: EmailActivity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_email_activity_for_subscriber_with_http_info(campaign_id, subscriber_hash, **kwargs) # noqa: E501
else:
data = self.get_email_activity_for_subscriber_with_http_info(campaign_id, subscriber_hash, **kwargs)  # noqa: E501
return data
def get_email_activity_for_subscriber_with_http_info(self, campaign_id, subscriber_hash, **kwargs): # noqa: E501
"""Get subscriber email activity # noqa: E501
Get a specific list member's activity in a campaign including opens, clicks, and bounces. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_email_activity_for_subscriber_with_http_info(campaign_id, subscriber_hash, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str subscriber_hash: The MD5 hash of the lowercase version of the list member's email address. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param str since: Restrict results to email activity events that occur after a specific time. We recommend [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) time format: 2015-10-21T15:41:36+00:00.
:return: EmailActivity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'subscriber_hash', 'fields', 'exclude_fields', 'since'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_email_activity_for_subscriber" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
# verify the required parameter 'subscriber_hash' is set
if ('subscriber_hash' not in params or
params['subscriber_hash'] is None):
raise ValueError("Missing the required parameter `subscriber_hash` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
if 'subscriber_hash' in params:
path_params['subscriber_hash'] = params['subscriber_hash'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'since' in params:
query_params.append(('since', params['since'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/email-activity/{subscriber_hash}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EmailActivity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_locations_for_campaign(self, campaign_id, **kwargs): # noqa: E501
"""List top open activities # noqa: E501
Get top open locations for a specific campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_locations_for_campaign(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:return: OpenLocations
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_locations_for_campaign_with_http_info(campaign_id, **kwargs) # noqa: E501
else:
(data) = self.get_locations_for_campaign_with_http_info(campaign_id, **kwargs) # noqa: E501
return data
def get_locations_for_campaign_with_http_info(self, campaign_id, **kwargs): # noqa: E501
"""List top open activities # noqa: E501
Get top open locations for a specific campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_locations_for_campaign_with_http_info(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:return: OpenLocations
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'fields', 'exclude_fields', 'count', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_locations_for_campaign" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/locations', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OpenLocations', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_campaign_open_details(self, campaign_id, **kwargs): # noqa: E501
"""List campaign open details # noqa: E501
Get detailed information about any campaign emails that were opened by a list member. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_open_details(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:param str since: Restrict results to campaign open events that occur after a specific time. We recommend [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) time format: 2015-10-21T15:41:36+00:00.
:return: OpenDetailReport
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_campaign_open_details_with_http_info(campaign_id, **kwargs) # noqa: E501
else:
(data) = self.get_campaign_open_details_with_http_info(campaign_id, **kwargs) # noqa: E501
return data
def get_campaign_open_details_with_http_info(self, campaign_id, **kwargs): # noqa: E501
"""List campaign open details # noqa: E501
Get detailed information about any campaign emails that were opened by a list member. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_open_details_with_http_info(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:param str since: Restrict results to campaign open events that occur after a specific time. We recommend [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) time format: 2015-10-21T15:41:36+00:00.
:return: OpenDetailReport
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'fields', 'exclude_fields', 'count', 'offset', 'since'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_campaign_open_details" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'since' in params:
query_params.append(('since', params['since'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/open-details', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OpenDetailReport', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_subscriber_info_for_opened_campaign(self, campaign_id, subscriber_hash, **kwargs): # noqa: E501
"""Get opened campaign subscriber # noqa: E501
Get information about a specific subscriber who opened a campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_subscriber_info_for_opened_campaign(campaign_id, subscriber_hash, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str subscriber_hash: The MD5 hash of the lowercase version of the list member's email address. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: OpenActivity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_subscriber_info_for_opened_campaign_with_http_info(campaign_id, subscriber_hash, **kwargs) # noqa: E501
else:
(data) = self.get_subscriber_info_for_opened_campaign_with_http_info(campaign_id, subscriber_hash, **kwargs) # noqa: E501
return data
def get_subscriber_info_for_opened_campaign_with_http_info(self, campaign_id, subscriber_hash, **kwargs): # noqa: E501
"""Get opened campaign subscriber # noqa: E501
Get information about a specific subscriber who opened a campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_subscriber_info_for_opened_campaign_with_http_info(campaign_id, subscriber_hash, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str subscriber_hash: The MD5 hash of the lowercase version of the list member's email address. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: OpenActivity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'subscriber_hash', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_subscriber_info_for_opened_campaign" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
# verify the required parameter 'subscriber_hash' is set
if ('subscriber_hash' not in params or
params['subscriber_hash'] is None):
raise ValueError("Missing the required parameter `subscriber_hash` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
if 'subscriber_hash' in params:
path_params['subscriber_hash'] = params['subscriber_hash'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/open-details/{subscriber_hash}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OpenActivity', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_campaign_recipients(self, campaign_id, **kwargs): # noqa: E501
"""List campaign recipients # noqa: E501
Get information about campaign recipients. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_recipients(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:return: SentTo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_campaign_recipients_with_http_info(campaign_id, **kwargs) # noqa: E501
else:
(data) = self.get_campaign_recipients_with_http_info(campaign_id, **kwargs) # noqa: E501
return data
def get_campaign_recipients_with_http_info(self, campaign_id, **kwargs): # noqa: E501
"""List campaign recipients # noqa: E501
Get information about campaign recipients. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_recipients_with_http_info(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:return: SentTo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'fields', 'exclude_fields', 'count', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_campaign_recipients" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/sent-to', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SentTo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_campaign_recipient(self, campaign_id, subscriber_hash, **kwargs): # noqa: E501
"""Get campaign recipient info # noqa: E501
Get information about a specific campaign recipient. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_recipient(campaign_id, subscriber_hash, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str subscriber_hash: The MD5 hash of the lowercase version of the list member's email address. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: SentTo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_campaign_recipient_with_http_info(campaign_id, subscriber_hash, **kwargs) # noqa: E501
else:
(data) = self.get_campaign_recipient_with_http_info(campaign_id, subscriber_hash, **kwargs) # noqa: E501
return data
def get_campaign_recipient_with_http_info(self, campaign_id, subscriber_hash, **kwargs): # noqa: E501
"""Get campaign recipient info # noqa: E501
Get information about a specific campaign recipient. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_campaign_recipient_with_http_info(campaign_id, subscriber_hash, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str subscriber_hash: The MD5 hash of the lowercase version of the list member's email address. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: SentTo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'subscriber_hash', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_campaign_recipient" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
# verify the required parameter 'subscriber_hash' is set
if ('subscriber_hash' not in params or
params['subscriber_hash'] is None):
raise ValueError("Missing the required parameter `subscriber_hash` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
if 'subscriber_hash' in params:
path_params['subscriber_hash'] = params['subscriber_hash'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/sent-to/{subscriber_hash}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SentTo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_sub_reports_for_campaign(self, campaign_id, **kwargs): # noqa: E501
"""List child campaign reports # noqa: E501
Get a list of reports with child campaigns for a specific parent campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_sub_reports_for_campaign(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: CampaignSubReports
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_sub_reports_for_campaign_with_http_info(campaign_id, **kwargs) # noqa: E501
else:
(data) = self.get_sub_reports_for_campaign_with_http_info(campaign_id, **kwargs) # noqa: E501
return data
def get_sub_reports_for_campaign_with_http_info(self, campaign_id, **kwargs): # noqa: E501
"""List child campaign reports # noqa: E501
Get a list of reports with child campaigns for a specific parent campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_sub_reports_for_campaign_with_http_info(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: CampaignSubReports
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_sub_reports_for_campaign" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/sub-reports', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CampaignSubReports', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_unsubscribed_list_for_campaign(self, campaign_id, **kwargs): # noqa: E501
"""List unsubscribed members # noqa: E501
Get information about members who have unsubscribed from a specific campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_unsubscribed_list_for_campaign(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:return: Unsubscribes
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_unsubscribed_list_for_campaign_with_http_info(campaign_id, **kwargs) # noqa: E501
else:
(data) = self.get_unsubscribed_list_for_campaign_with_http_info(campaign_id, **kwargs) # noqa: E501
return data
def get_unsubscribed_list_for_campaign_with_http_info(self, campaign_id, **kwargs): # noqa: E501
"""List unsubscribed members # noqa: E501
Get information about members who have unsubscribed from a specific campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_unsubscribed_list_for_campaign_with_http_info(campaign_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:return: Unsubscribes
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'fields', 'exclude_fields', 'count', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_unsubscribed_list_for_campaign" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/unsubscribed', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Unsubscribes', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_unsubscribed_list_member(self, campaign_id, subscriber_hash, **kwargs): # noqa: E501
"""Get unsubscribed member # noqa: E501
Get information about a specific list member who unsubscribed from a campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_unsubscribed_list_member(campaign_id, subscriber_hash, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str subscriber_hash: The MD5 hash of the lowercase version of the list member's email address. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: Unsubscribes
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_unsubscribed_list_member_with_http_info(campaign_id, subscriber_hash, **kwargs) # noqa: E501
else:
(data) = self.get_unsubscribed_list_member_with_http_info(campaign_id, subscriber_hash, **kwargs) # noqa: E501
return data
def get_unsubscribed_list_member_with_http_info(self, campaign_id, subscriber_hash, **kwargs): # noqa: E501
"""Get unsubscribed member # noqa: E501
Get information about a specific list member who unsubscribed from a campaign. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_unsubscribed_list_member_with_http_info(campaign_id, subscriber_hash, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str campaign_id: The unique id for the campaign. (required)
:param str subscriber_hash: The MD5 hash of the lowercase version of the list member's email address. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: Unsubscribes
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['campaign_id', 'subscriber_hash', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_unsubscribed_list_member" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'campaign_id' is set
if ('campaign_id' not in params or
params['campaign_id'] is None):
raise ValueError("Missing the required parameter `campaign_id` when calling ``") # noqa: E501
# verify the required parameter 'subscriber_hash' is set
if ('subscriber_hash' not in params or
params['subscriber_hash'] is None):
raise ValueError("Missing the required parameter `subscriber_hash` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'campaign_id' in params:
path_params['campaign_id'] = params['campaign_id'] # noqa: E501
if 'subscriber_hash' in params:
path_params['subscriber_hash'] = params['subscriber_hash'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/reports/{campaign_id}/unsubscribed/{subscriber_hash}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Unsubscribes', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
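# ---------------------------------------------------------------------------
# Usage sketch (editor's addition, not part of the generated client). The
# package and class names below are assumptions -- swagger-codegen output
# varies -- so adjust the `Configuration`, `ApiClient`, and `ReportsApi`
# imports to match your generated package. The `subscriber_hash` computation
# follows the docstrings above: MD5 of the lowercased email address.
#
# import hashlib
# from mailchimp_client import ApiClient, Configuration  # hypothetical names
# from mailchimp_client.api.reports_api import ReportsApi  # hypothetical name
#
# config = Configuration()
# config.username = 'anystring'         # basicAuth accepts any username
# config.password = 'YOUR_API_KEY-us1'  # API key including data-center suffix
# config.host = 'https://us1.api.mailchimp.com/3.0'
#
# api = ReportsApi(ApiClient(config))
# subscriber_hash = hashlib.md5('User@Example.com'.lower().encode()).hexdigest()
# opens = api.get_campaign_open_details('CAMPAIGN_ID', count=50)
# activity = api.get_email_activity_for_subscriber('CAMPAIGN_ID', subscriber_hash)
# ---------------------------------------------------------------------------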
b1258d3cf9db357066f081d4c5d01d1a3d47e8d8 | 2,755 | py | Python | tests/test_lin.py | leisure0808/ldfparser | MIT
# pylint: disable=invalid-name
import pytest
from ldfparser.lin import LIN_VERSION_1_3, LIN_VERSION_2_0, LIN_VERSION_2_1, LIN_VERSION_2_2
@pytest.mark.unit()
@pytest.mark.parametrize(
('version', 'expected'),
[
(LIN_VERSION_1_3, "1.3"),
(LIN_VERSION_2_0, "2.0"),
(LIN_VERSION_2_1, "2.1"),
(LIN_VERSION_2_2, "2.2")
]
)
def test_linversion_str(version, expected):
assert str(version) == expected
@pytest.mark.unit()
@pytest.mark.parametrize(
('a', 'b'),
[
(LIN_VERSION_1_3, LIN_VERSION_1_3),
(LIN_VERSION_2_0, LIN_VERSION_2_0),
(LIN_VERSION_2_1, LIN_VERSION_2_1)
]
)
def test_linversion_equal_version(a, b):
assert a == b
@pytest.mark.unit()
@pytest.mark.parametrize(
('a', 'b'),
[
(LIN_VERSION_1_3, LIN_VERSION_2_1),
(LIN_VERSION_2_0, LIN_VERSION_2_1),
(LIN_VERSION_2_2, LIN_VERSION_2_1),
(LIN_VERSION_2_2, "2.2")
]
)
def test_linversion_not_equal_version(a, b):
assert a != b
@pytest.mark.unit()
@pytest.mark.parametrize(
('a', 'b'),
[
(LIN_VERSION_1_3, LIN_VERSION_2_1),
(LIN_VERSION_2_0, LIN_VERSION_2_1),
(LIN_VERSION_2_1, LIN_VERSION_2_2)
]
)
def test_linversion_less_than_version(a, b):
assert a < b
@pytest.mark.unit()
@pytest.mark.parametrize(
('a', 'b'),
[
(LIN_VERSION_2_0, LIN_VERSION_1_3),
(LIN_VERSION_2_1, LIN_VERSION_2_0),
(LIN_VERSION_2_2, LIN_VERSION_2_1)
]
)
def test_linversion_greater_than_version(a, b):
assert a > b
@pytest.mark.unit()
@pytest.mark.parametrize(
('a', 'b'),
[
(LIN_VERSION_1_3, LIN_VERSION_1_3),
(LIN_VERSION_1_3, LIN_VERSION_2_0),
(LIN_VERSION_1_3, LIN_VERSION_2_1)
]
)
def test_linversion_less_than_equal_version(a, b):
assert a <= b
@pytest.mark.unit()
@pytest.mark.parametrize(
('a', 'b'),
[
(LIN_VERSION_2_0, LIN_VERSION_1_3),
(LIN_VERSION_2_0, LIN_VERSION_2_0),
(LIN_VERSION_2_1, LIN_VERSION_2_0)
]
)
def test_linversion_greater_than_equal_version(a, b):
assert a >= b
@pytest.mark.unit()
@pytest.mark.parametrize(
('version', 'expected'),
[
(LIN_VERSION_1_3, 1.3),
(LIN_VERSION_2_0, 2.0),
(LIN_VERSION_2_1, 2.1),
(LIN_VERSION_2_2, 2.2)
]
)
def test_linversion_float(version, expected):
assert float(version) == expected
@pytest.mark.unit()
@pytest.mark.parametrize(
('func', 'arg'),
[
(LIN_VERSION_1_3.__gt__, "2.0"),
(LIN_VERSION_1_3.__lt__, "2.0"),
(LIN_VERSION_1_3.__ge__, "2.0"),
(LIN_VERSION_1_3.__le__, "2.0")
]
)
def test_linversion_typerror(func, arg):
with pytest.raises(TypeError):
func(arg)
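# Usage sketch (editor's addition): the comparisons exercised by these tests
# make version gating in application code straightforward. Illustrative only;
# the constants come from ldfparser.lin as imported above.
#
# from ldfparser.lin import LIN_VERSION_2_0, LIN_VERSION_2_1
#
# ldf_version = LIN_VERSION_2_1
# if ldf_version >= LIN_VERSION_2_0:
#     print("LDF declares LIN %s" % ldf_version)  # str() yields "2.1"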
b16e147110d3ea0f6878818adb436a8121bc2818 | 7,360 | py | Python | data_loaders.py | axyzhao/pytorch-AdaIN | MIT
import numpy as np
import torch
from torchvision import datasets, transforms
from torch.utils.data.sampler import SubsetRandomSampler
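# `plot_images` is referenced by the show_sample paths below but is never
# defined or imported in this module. The helper here is an editor's sketch,
# assuming matplotlib is available, so that code path actually runs; swap in
# your own plotting routine if you already have one.
import matplotlib.pyplot as plt


def plot_images(images, labels):
    """Plot a 3x3 grid of normalized CHW images with their labels."""
    images = np.transpose(images, (0, 2, 3, 1))  # CHW -> HWC for imshow
    _, axes = plt.subplots(3, 3, figsize=(6, 6))
    for i, ax in enumerate(axes.flat):
        ax.imshow((images[i] * 0.5 + 0.5).clip(0, 1))  # undo Normalize(0.5, 0.5)
        ax.set_title(str(labels[i].item()))
        ax.axis('off')
    plt.tight_layout()
    plt.show()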
def get_mnist_train_valid_loader(data_dir,
batch_size,
random_seed,
valid_size=0.2,
shuffle=True,
show_sample=False,
num_workers=1,
pin_memory=True):
"""
Utility function for loading and returning train and valid
multi-process iterators over the MNIST dataset. A sample
9x9 grid of the images can be optionally displayed.
If using CUDA, num_workers should be set to 1 and pin_memory to True.
Params
------
- data_dir: path directory to the dataset.
- batch_size: how many samples per batch to load.
- augment: whether to apply the data augmentation scheme
mentioned in the paper. Only applied on the train split.
- random_seed: fix seed for reproducibility.
- valid_size: percentage split of the training set used for
the validation set. Should be a float in the range [0, 1].
- shuffle: whether to shuffle the train/validation indices.
- show_sample: plot 9x9 sample grid of the dataset.
- num_workers: number of subprocesses to use when loading the dataset.
- pin_memory: whether to copy tensors into CUDA pinned memory. Set it to
True if using GPU.
Returns
-------
- train_loader: training set iterator.
- valid_loader: validation set iterator.
"""
error_msg = "[!] valid_size should be in the range [0, 1]."
assert ((valid_size >= 0) and (valid_size <= 1)), error_msg
# define transforms
transform = transforms.Compose([
transforms.Pad(2),
transforms.ToTensor(),
transforms.Lambda(lambda x: x.repeat(3,1,1)),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
# load the dataset
train_dataset = datasets.MNIST(root=data_dir, train=True,
download=True, transform=transform)
valid_dataset = datasets.MNIST(root=data_dir, train=True,
download=True, transform=transform)
num_train = len(train_dataset)
indices = list(range(num_train))
split = int(np.floor(valid_size * num_train))
if shuffle:
np.random.seed(random_seed)
np.random.shuffle(indices)
train_idx, valid_idx = indices[split:], indices[:split]
train_sampler = SubsetRandomSampler(train_idx)
valid_sampler = SubsetRandomSampler(valid_idx)
train_loader = torch.utils.data.DataLoader(train_dataset,
batch_size=batch_size, sampler=train_sampler,
num_workers=num_workers, pin_memory=pin_memory)
valid_loader = torch.utils.data.DataLoader(valid_dataset,
batch_size=1, sampler=valid_sampler,
num_workers=num_workers, pin_memory=pin_memory)
# visualize some images
if show_sample:
sample_loader = torch.utils.data.DataLoader(train_dataset,
batch_size=9,
shuffle=shuffle,
num_workers=num_workers,
pin_memory=pin_memory)
data_iter = iter(sample_loader)
images, labels = data_iter.next()
X = images.numpy()
plot_images(X, labels)
return (train_loader, valid_loader)
def get_svhn_train_valid_loader(data_dir,
batch_size,
random_seed,
valid_size=0.2,
shuffle=True,
show_sample=False,
num_workers=1,
pin_memory=True):
"""
Utility function for loading and returning train and valid
multi-process iterators over the SVHN dataset. A sample
9x9 grid of the images can be optionally displayed.
If using CUDA, num_workers should be set to 1 and pin_memory to True.
Params
------
- data_dir: path directory to the dataset.
- batch_size: how many samples per batch to load.
- augment: whether to apply the data augmentation scheme
mentioned in the paper. Only applied on the train split.
- random_seed: fix seed for reproducibility.
- valid_size: percentage split of the training set used for
the validation set. Should be a float in the range [0, 1].
- shuffle: whether to shuffle the train/validation indices.
- show_sample: plot 9x9 sample grid of the dataset.
- num_workers: number of subprocesses to use when loading the dataset.
- pin_memory: whether to copy tensors into CUDA pinned memory. Set it to
True if using GPU.
Returns
-------
- train_loader: training set iterator.
- valid_loader: validation set iterator.
"""
error_msg = "[!] valid_size should be in the range [0, 1]."
assert ((valid_size >= 0) and (valid_size <= 1)), error_msg
# define transforms
transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
# load the dataset
train_dataset = datasets.SVHN(root=data_dir, split='train',
download=True, transform=transform)
valid_dataset = datasets.SVHN(root=data_dir, split='train',
download=True, transform=transform)
num_train = len(train_dataset)
indices = list(range(num_train))
split = int(np.floor(valid_size * num_train))
if shuffle:
np.random.seed(random_seed)
np.random.shuffle(indices)
train_idx, valid_idx = indices[split:], indices[:split]
train_sampler = SubsetRandomSampler(train_idx)
valid_sampler = SubsetRandomSampler(valid_idx)
train_loader = torch.utils.data.DataLoader(train_dataset,
batch_size=batch_size, sampler=train_sampler,
num_workers=num_workers, pin_memory=pin_memory)
valid_loader = torch.utils.data.DataLoader(valid_dataset,
batch_size=1, sampler=valid_sampler,
num_workers=num_workers, pin_memory=pin_memory)
# visualize some images
if show_sample:
sample_loader = torch.utils.data.DataLoader(train_dataset,
batch_size=9,
shuffle=shuffle,
num_workers=num_workers,
pin_memory=pin_memory)
data_iter = iter(sample_loader)
images, labels = data_iter.next()
X = images.numpy()
plot_images(X, labels)
return (train_loader, valid_loader)
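# Usage sketch (editor's addition): wiring the loaders into a training loop.
# `./data`, the batch size, and the seed are illustrative values.
#
# train_loader, valid_loader = get_mnist_train_valid_loader(
#     data_dir='./data', batch_size=64, random_seed=42, valid_size=0.2)
# for images, labels in train_loader:
#     pass  # images: (64, 3, 32, 32) after Pad(2) + 3-channel repeat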
b189928e6335196a5463e3d789224946f3d1199d | 127 | py | Python | Model/__init__.py | ayyyq/T-LSTM | MIT
# from .baseline_xlnet_re import Baseline_xlnet_re
from .baseline_bert import BaselineBert
from .Baseline import GraphBaseline | 42.333333 | 51 | 0.866142 | 17 | 127 | 6.176471 | 0.470588 | 0.342857 | 0.285714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.102362 | 127 | 3 | 52 | 42.333333 | 0.921053 | 0.385827 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
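# The re-exports above let callers write `from Model import BaselineBert,
# GraphBaseline` instead of importing the submodules directly.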
49104bb23f9c86263fc964d8e6b3075581bc68ea | 6,744 | py | Python | python/examples/scenarios/meta/collector_meta.py | Smart-Ag/xviz | Apache-2.0
import xviz_avs as xviz
def get_builder():
builder = xviz.XVIZMetadataBuilder()
builder.stream("/vehicle_pose")\
.category(xviz.CATEGORY.POSE)
builder.stream("/radar_filtered_out_targets")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({'fill_color': [255, 255, 0]})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.CIRCLE)
builder.stream("/radar_passed_filter_targets")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({'fill_color': [255, 0, 0]})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.CIRCLE)
builder.stream("/radar_id")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({'fill_color': [0, 0, 0]})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.TEXT)
builder.stream("/smartmicro_radar_targets")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({
'fill_color': [255, 0, 0],
'height': 0.6,
'extruded': True,
})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.POLYGON)
builder.stream("/camera_targets")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({'fill_color': [0, 255, 255]})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.CIRCLE)
builder.stream("/tracking_targets")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.CIRCLE)
builder.stream("/tracking_id")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({'fill_color': [0, 0, 0]})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.TEXT)
builder.stream("/combine")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({
'stroke_width': 0.3,
'stroke_color': [128, 0, 128],
})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.POLYLINE)
builder.stream("/auger")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({
'stroke_width': 0.3,
'stroke_color': [255, 69, 0],
})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.POLYLINE)
builder.stream("/field_definition")\
.coordinate(xviz.COORDINATE_TYPES.IDENTITY)\
.stream_style({
'stroke_color': [40, 150, 40, 128],
'stroke_width': 0.3,
})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.POLYLINE)
builder.stream("/planning_map")\
.coordinate(xviz.COORDINATE_TYPES.IDENTITY)\
.stream_style({
'stroke_color': [0, 0, 255, 128],
'stroke_width': 0.3,
})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.POLYLINE)
builder.stream("/vision_polygons")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({'stroke_color': [0, 128, 128, 128]})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.POLYLINE)
builder.stream("/predictive_polygons")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({'stroke_color': [128, 128, 0, 128]})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.POLYLINE)
builder.stream("/planned_path")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({
'stroke_width': 0.2,
'stroke_color': [0, 170, 220, 200]
})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.POLYLINE)
builder.stream("/control_signal")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({'stroke_color': [128, 0, 128, 128]})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.POLYLINE)
builder.stream("/sync_status")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({'fill_color': [0, 0, 0]})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.TEXT)
builder.stream("/tractor_speed")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({'fill_color': [0, 0, 0]})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.TEXT)
builder.stream("/combine_speed")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({'fill_color': [0, 0, 0]})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.TEXT)
builder.stream("/set_speed")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({'fill_color': [0, 0, 0]})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.TEXT)
builder.stream("/sync_point")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({'fill_color': [0, 128, 30]})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.CIRCLE)
builder.stream("/breadcrumbs")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({
'stroke_width': 0.2,
'stroke_color': [255, 50, 20, 200]
})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.POLYLINE)
builder.stream("/radar_fov")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({'stroke_color': [255, 0, 0, 100]})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.POLYLINE)
builder.stream("/camera_fov")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({'stroke_color': [0, 150, 200, 100]})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.POLYLINE)
builder.stream("/measuring_circles")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({
'stroked': True,
'filled': False,
'stroke_width': 0.2,
'stroke_color': [0, 0, 0, 20],
})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.CIRCLE)
builder.stream("/measuring_circles_lbl")\
.coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
.stream_style({
'fill_color': [0, 0, 0]
})\
.category(xviz.CATEGORY.PRIMITIVE)\
.type(xviz.PRIMITIVE_TYPES.TEXT)
return builder
| 38.101695 | 60 | 0.628855 | 712 | 6,744 | 5.733146 | 0.117978 | 0.082803 | 0.127389 | 0.177609 | 0.886085 | 0.885595 | 0.885595 | 0.866732 | 0.840764 | 0.811612 | 0 | 0.032634 | 0.223013 | 6,744 | 176 | 61 | 38.318182 | 0.746374 | 0 | 0 | 0.660377 | 0 | 0 | 0.113138 | 0.015125 | 0 | 0 | 0 | 0 | 0 | 1 | 0.006289 | false | 0.006289 | 0.006289 | 0 | 0.018868 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
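
The metadata builder above wires one stream per visualization channel; a minimal sketch of adding a further stream with the same fluent chain (the stream name and color below are illustrative, not part of the original file — every method call is one already used in the record):

import xviz_avs as xviz

def add_debug_stream(builder):
    # Same chain as the streams above: coordinate -> style -> category -> type
    builder.stream("/debug_markers")\
        .coordinate(xviz.COORDINATE_TYPES.VEHICLE_RELATIVE)\
        .stream_style({'fill_color': [200, 200, 200]})\
        .category(xviz.CATEGORY.PRIMITIVE)\
        .type(xviz.PRIMITIVE_TYPES.CIRCLE)
    return builder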
4917fee0e90b2a2868612b2ac3dfa1a07496659b | 17,842 | py | Python | rdfframes/test_queries/test_joins_old.py | qcri/RDFframe | 2a50105479051c134cc5eddc9e20d55b755ef765 | [
"MIT"
] | 13 | 2019-07-06T00:10:11.000Z | 2022-02-20T02:14:16.000Z | rdfframes/test_queries/test_joins_old.py | qcri/RDFrame | 2a50105479051c134cc5eddc9e20d55b755ef765 | [
"MIT"
] | 1 | 2019-05-20T08:51:42.000Z | 2019-05-20T08:51:42.000Z | rdfframes/test_queries/test_joins_old.py | qcri/RDFframe | 2a50105479051c134cc5eddc9e20d55b755ef765 | [
"MIT"
] | 3 | 2020-04-17T10:50:37.000Z | 2022-03-23T01:30:16.000Z | import time
from rdfframes.knowledge_graph import KnowledgeGraph
from rdfframes.utils.constants import JoinType
def test_expandable_expandable_join(join_type, optional1=False, optional2=False):
start = time.time()
# create a knowledge graph to store the graph uri and prefixes
graph = KnowledgeGraph('twitter', 'https://twitter.com',
prefixes={
"rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"sioc": "http://rdfs.org/sioc/ns#",
"sioct": "http://rdfs.org/sioc/types#",
"xsd": "http://www.example.org/",
"foaf": "http://xmlns.com/foaf/0.1/"
})
# return all the instances of the tweet class
dataset = graph.entities(class_name='sioct:microblogPost',
new_dataset_name='dataset1',
entities_col_name='tweet')
dataset = dataset.expand(src_col_name='tweet', predicate_list=[
('sioc:has_creater', 'tweep', False),
('sioc:content', 'text', optional1)
]).select_cols(['tweep', 'tweet'])
graph = KnowledgeGraph('twitter2', 'https://twitter2.com',
prefixes={
"rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"sioc": "http://rdfs.org/sioc/ns#",
"sioct": "http://rdfs.org/sioc/types#",
"to": "http://twitter.com/ontology/",
"dcterms": "http://purl.org/dc/terms/",
})
dataset2 = graph.entities(class_name='sioc:UserAccount',
new_dataset_name='dataset2',
entities_col_name='tweeter')
dataset2 = dataset2.expand(src_col_name='tweeter', predicate_list=[
('sioc:has_name', 'name', optional2)
]).select_cols(['tweeter'])
    dataset.join(dataset2, 'tweep', 'tweeter', 'tweep', join_type)
sparql_query = dataset.to_sparql()
print("SPARQL query with {} =\n{}\n".format(join_type, sparql_query))
def test_join_instead_of_expand(join_type):
start = time.time()
# create a knowledge graph to store the graph uri and prefixes
graph = KnowledgeGraph('twitter', 'https://twitter.com',
prefixes={
"rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"sioc": "http://rdfs.org/sioc/ns#",
"sioct": "http://rdfs.org/sioc/types#",
"to": "http://twitter.com/ontology/",
"dcterms": "http://purl.org/dc/terms/",
"xsd": "http://www.example.org/",
"foaf": "http://xmlns.com/foaf/0.1/"
})
# return all the instances of the tweet class
dataset1 = graph.entities(class_name='sioct:microblogPost',
new_dataset_name='dataset',
entities_col_name='tweet')\
.expand(src_col_name='tweet', predicate_list=[('sioc:has_creater', 'tweep', False)])
dataset2 = graph.entities(class_name='sioct:microblogPost',
new_dataset_name='dataset',
entities_col_name='tweet')\
.expand(src_col_name='tweet', predicate_list=[('sioc:content', 'text', False)])
dataset2.join(dataset1, 'tweet', 'tweet', 'tweet', join_type)
sparql_query = dataset2.to_sparql()
print("SPARQL query with {} =\n{}\n".format(join_type, sparql_query))
def test_expandable_expandable_3_joins(join_type):
start = time.time()
# create a knowledge graph to store the graph uri and prefixes
graph = KnowledgeGraph('twitter', 'https://twitter.com/',
prefixes={
"rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"sioc": "http://rdfs.org/sioc/ns#",
"sioct": "http://rdfs.org/sioc/types#",
"to": "http://twitter.com/ontology/",
"dcterms": "http://purl.org/dc/terms/",
"xsd": "http://www.example.org/",
"foaf": "http://xmlns.com/foaf/0.1/"
})
# return all the instances of the tweet class
dataset = graph.entities(class_name='sioct:microblogPost',
new_dataset_name='dataset1',
entities_col_name='tweet')
dataset = dataset.expand(src_col_name='tweet', predicate_list=[
('sioc:has_creater', 'tweep', False),
('sioc:content', 'text', False)
])
dataset2 = graph.entities(class_name='sioc:UserAccount',
new_dataset_name='dataset2',
entities_col_name='tweep')
dataset2 = dataset2.expand(src_col_name='tweep', predicate_list=[
('sioc:has_name', 'name', False),
('sioc:has_follower', 'follower', False)
])
dataset2.join(dataset, 'tweep', 'tweep', 'tweep', join_type)
dataset3 = graph.entities(class_name='sioc:UserAccount',
new_dataset_name='dataset3',
entities_col_name='tweeter')
dataset3 = dataset3.expand(src_col_name='tweeter', predicate_list=[
('sioc:has_id', 'id', False)
])
dataset3.join(dataset2, 'tweeter', 'follower', 'follower', join_type)
sparql_query = dataset3.to_sparql()
print("SPARQL query with {} =\n{}\n".format(join_type, sparql_query))
def test_expandable_expandable_join_w_selectcols():
start = time.time()
# create a knowledge graph to store the graph uri and prefixes
graph = KnowledgeGraph('twitter', 'https://twitter.com',
prefixes={
"rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"sioc": "http://rdfs.org/sioc/ns#",
"sioct": "http://rdfs.org/sioc/types#",
"to": "http://twitter.com/ontology/",
"dcterms": "http://purl.org/dc/terms/",
"xsd": "http://www.example.org/",
"foaf": "http://xmlns.com/foaf/0.1/"
})
# return all the instances of the tweet class
dataset = graph.entities(class_name='sioct:microblogPost',
new_dataset_name='dataset1',
entities_col_name='tweet')
dataset = dataset.expand(src_col_name='tweet', predicate_list=[
('sioc:has_creater', 'tweep', False),
('sioc:content', 'text', False)
]).select_cols(['tweep', 'text'])
dataset2 = graph.entities(class_name='sioct:tweeter',
new_dataset_name='dataset2',
entities_col_name='tweep')
dataset2 = dataset2.expand(src_col_name='tweep', predicate_list=[
('sioc:has_name', 'name', False)
]).select_cols(['tweep', 'name'])
dataset.join(dataset2, 'tweep', 'tweep', 'tweep', JoinType.InnerJoin)
sparql_query = dataset.to_sparql()
print("SPARQL query =\n{}\n".format(sparql_query))
def test_expandable_grouped_join(join_type):
# create a knowledge graph to store the graph uri and prefixes
graph = KnowledgeGraph('twitter', 'https://twitter.com/',
prefixes={
"rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"sioc": "http://rdfs.org/sioc/ns#",
"sioct": "http://rdfs.org/sioc/types#",
"to": "http://twitter.com/ontology/",
"dcterms": "http://purl.org/dc/terms/",
"xsd": "http://www.example.org/",
"foaf": "http://xmlns.com/foaf/0.1/"
})
# return all the instances of the tweet class
dataset = graph.entities(class_name='sioct:microblogPost',
new_dataset_name='dataset1',
entities_col_name='tweet')
dataset = dataset.expand(src_col_name='tweet', predicate_list=[
('sioc:has_creater', 'tweep', False),
('sioc:content', 'text', False)
])
dataset2 = graph.entities(class_name='sioct:microblogPost',
new_dataset_name='tweets',
entities_col_name='tweet')
dataset2 = dataset2.expand(src_col_name='tweet', predicate_list=[
('sioc:has_creater', 'tweeter')
]).group_by(['tweeter']).count('tweet', 'tweets_count').filter(
conditions_dict={'tweets_count': ['>= {}'.format(200), '<= {}'.format(300)]})
dataset.join(dataset2, 'tweep', 'tweeter', 'user', join_type)
#dataset.select_cols(['user'])
sparql_query = dataset.to_sparql()
print("SPARQL query with {} =\n{}\n".format(join_type, sparql_query))
# endpoint = 'http://10.161.202.101:8890/sparql/'
# port = 8890
# output_format = HttpClientDataFormat.PANDAS_DF
# max_rows = 1000000
# timeout = 12000
# default_graph_url = 'http://twitter.com/'
# client = HttpClient(endpoint_url=endpoint,
# port=port,
# return_format=output_format,
# timeout=timeout,
# default_graph_uri=default_graph_url,
# max_rows=max_rows
# )
#
# df = dataset.execute(client, return_format=output_format)
# print(df.head(10))
def test_grouped_expandable_join(join_type):
# create a knowledge graph to store the graph uri and prefixes
graph = KnowledgeGraph('twitter', 'https://twitter.com/',
prefixes={
"rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"sioc": "http://rdfs.org/sioc/ns#",
"sioct": "http://rdfs.org/sioc/types#",
"to": "http://twitter.com/ontology/",
"dcterms": "http://purl.org/dc/terms/",
"xsd": "http://www.example.org/",
"foaf": "http://xmlns.com/foaf/0.1/"
})
# return all the instances of the tweet class
dataset = graph.entities(class_name='sioct:microblogPost',
new_dataset_name='dataset1',
entities_col_name='tweet')
dataset = dataset.expand(src_col_name='tweet', predicate_list=[
('sioc:has_creater', 'tweep', False),
('sioc:content', 'text', False)
])
dataset2 = graph.entities(class_name='sioct:microblogPost',
new_dataset_name='tweets',
entities_col_name='tweet')
dataset2 = dataset2.expand(src_col_name='tweet', predicate_list=[
('sioc:has_creater', 'tweeter')
]).group_by(['tweeter']).count('tweet', 'tweets_count').filter(
conditions_dict={'tweets_count': ['>= {}'.format(200), '<= {}'.format(300)]})
    dataset2 = dataset2.expand(src_col_name='tweeter', predicate_list=[('rdf:type', 'sioc:UserAccount')])
dataset2.join(dataset, 'tweeter', 'tweep', 'user', join_type)
dataset2.select_cols(['user'])
sparql_query = dataset2.to_sparql()
print("SPARQL query with {} =\n{}\n".format(join_type, sparql_query))
def test_grouped_grouped_join_diff_graphs(join_type):
# create a knowledge graph to store the graph uri and prefixes
graph = KnowledgeGraph('twitter', 'https://twitter.com/',
prefixes={
"rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"sioc": "http://rdfs.org/sioc/ns#",
"sioct": "http://rdfs.org/sioc/types#",
})
# return all the instances of the tweet class
dataset = graph.entities(class_name='sioct:microblogPost',
new_dataset_name='dataset1',
entities_col_name='tweet')
dataset = dataset.expand(src_col_name='tweet', predicate_list=[
('sioc:has_creater', 'tweep', False),
('sioc:content', 'text', False)])\
.group_by(['tweep']).count('tweet', 'tweets_count')\
.filter({'tweets_count': ['>= {}'.format(1000)]})
graph2 = KnowledgeGraph('twitter2', 'https://twitter2.com/',
prefixes={
"rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"sioc2": "http://rdfs.org/sioc2/ns#",
"sioct2": "http://rdfs.org/sioc2/types#",
})
dataset2 = graph2.entities(class_name='sioct2:twitterPost',
new_dataset_name='tweets',
entities_col_name='tweet')
dataset2 = dataset2.expand(src_col_name='tweet', predicate_list=[
('sioc2:has_creater', 'tweeter')
]).group_by(['tweeter']).count('tweet', 'tweets_count2', unique=False)\
.filter(conditions_dict={'tweets_count2': ['>= {}'.format(200), '<= {}'.format(300)]})
dataset.join(dataset2, 'tweep', 'tweeter', 'user', join_type)
dataset.select_cols(['user'])
sparql_query = dataset.to_sparql()
print("SPARQL query with {} =\n{}\n".format(join_type, sparql_query))
def test_grouped_grouped_join(join_type):
# create a knowledge graph to store the graph uri and prefixes
graph = KnowledgeGraph('twitter', 'https://twitter.com/',
prefixes={
"rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"sioc": "http://rdfs.org/sioc/ns#",
"sioct": "http://rdfs.org/sioc/types#",
})
# return all the instances of the tweet class
dataset = graph.entities(class_name='sioct:microblogPost',
new_dataset_name='dataset1',
entities_col_name='tweet')
dataset = dataset.expand(src_col_name='tweet', predicate_list=[
('sioc:has_creater', 'tweep', False),
('sioc:content', 'text', False)])\
.group_by(['tweep']).count('tweet', 'tweets_count')\
.filter({'tweets_count': ['>= {}'.format(1000)]})
graph2 = KnowledgeGraph('twitter', 'https://twitter.com/',
prefixes={
"rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
"sioc2": "http://rdfs.org/sioc2/ns#",
"sioct2": "http://rdfs.org/sioc2/types#",
})
dataset2 = graph2.entities(class_name='sioct2:twitterPost',
new_dataset_name='tweets',
entities_col_name='tweet')
dataset2 = dataset2.expand(src_col_name='tweet', predicate_list=[
('sioc2:has_creater', 'tweeter')
]).group_by(['tweeter']).count('tweet', 'tweets_count2', unique=False)\
.filter(conditions_dict={'tweets_count2': ['>= {}'.format(200), '<= {}'.format(300)]})
dataset.join(dataset2, 'tweep', 'tweeter', 'user', join_type)
dataset.select_cols(['user'])
sparql_query = dataset.to_sparql()
print("SPARQL query with {} =\n{}\n".format(join_type, sparql_query))
if __name__ == '__main__':
# TODO: remove the last dot from the triple patterns in one block
test_expandable_expandable_join(JoinType.InnerJoin)
test_expandable_expandable_join(JoinType.LeftOuterJoin)
test_expandable_expandable_join(JoinType.RightOuterJoin)
test_expandable_expandable_join(JoinType.OuterJoin)
test_expandable_expandable_join(JoinType.InnerJoin, True, True)
test_expandable_expandable_join(JoinType.LeftOuterJoin, True, True)
test_expandable_expandable_join(JoinType.RightOuterJoin, True, True)
test_expandable_expandable_join(JoinType.InnerJoin, False, True)
test_expandable_expandable_join(JoinType.InnerJoin, True, False)
test_expandable_expandable_join(JoinType.OuterJoin, False, True)
test_join_instead_of_expand(JoinType.InnerJoin)
test_join_instead_of_expand(JoinType.LeftOuterJoin)
test_join_instead_of_expand(JoinType.RightOuterJoin)
test_join_instead_of_expand(JoinType.OuterJoin)
test_expandable_expandable_3_joins(JoinType.InnerJoin)
test_expandable_expandable_join_w_selectcols()
test_expandable_grouped_join(JoinType.InnerJoin)
test_expandable_grouped_join(JoinType.LeftOuterJoin)
# TODO: Opt: put the patterns from the optional dataset (first) outside a nested query when possible
test_expandable_grouped_join(JoinType.RightOuterJoin)
test_expandable_grouped_join(JoinType.OuterJoin)
# TODO: Opt: add the outer triple patterns in a grouped dataset to the outer query in an inner join
test_grouped_expandable_join(JoinType.InnerJoin)
test_grouped_expandable_join(JoinType.LeftOuterJoin)
### test the join on non-groupby columns
test_grouped_expandable_join(JoinType.RightOuterJoin)
test_grouped_grouped_join(JoinType.InnerJoin)
test_grouped_grouped_join(JoinType.LeftOuterJoin)
test_grouped_grouped_join(JoinType.RightOuterJoin)
test_grouped_grouped_join(JoinType.OuterJoin)
| 49.423823 | 105 | 0.554646 | 1,857 | 17,842 | 5.127087 | 0.092084 | 0.025733 | 0.03277 | 0.028358 | 0.859994 | 0.805693 | 0.753282 | 0.72818 | 0.712005 | 0.690474 | 0 | 0.020964 | 0.304899 | 17,842 | 360 | 106 | 49.561111 | 0.746734 | 0.095505 | 0 | 0.744526 | 0 | 0 | 0.247577 | 0 | 0 | 0 | 0 | 0.002778 | 0 | 1 | 0.029197 | false | 0 | 0.010949 | 0 | 0.040146 | 0.029197 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
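
A condensed sketch of the expand / group_by / join pattern the tests above exercise, using only calls that appear in the file (the graph URI and prefixes are illustrative):

from rdfframes.knowledge_graph import KnowledgeGraph
from rdfframes.utils.constants import JoinType

graph = KnowledgeGraph('twitter', 'https://twitter.com/',
                       prefixes={"sioc": "http://rdfs.org/sioc/ns#",
                                 "sioct": "http://rdfs.org/sioc/types#"})
# expandable dataset: one row per (tweet, author)
posts = graph.entities(class_name='sioct:microblogPost',
                       new_dataset_name='posts', entities_col_name='tweet')
posts = posts.expand(src_col_name='tweet',
                     predicate_list=[('sioc:has_creater', 'tweep', False)])
# grouped dataset: tweet counts per author
counts = graph.entities(class_name='sioct:microblogPost',
                        new_dataset_name='counts', entities_col_name='tweet')\
    .expand(src_col_name='tweet', predicate_list=[('sioc:has_creater', 'tweeter')])\
    .group_by(['tweeter']).count('tweet', 'tweets_count')
posts.join(counts, 'tweep', 'tweeter', 'user', JoinType.InnerJoin)
print(posts.to_sparql())  # the whole pipeline compiles to a single SPARQL query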
49518d2a702597c3ad675a1e9c6f3bccc1218a5c | 6,741 | py | Python | sku_db.py | uniqueinx/swenson | 6a70ec1b077aa6f0506a2bb1cadb45422ed85094 | [
"MIT"
] | null | null | null | sku_db.py | uniqueinx/swenson | 6a70ec1b077aa6f0506a2bb1cadb45422ed85094 | [
"MIT"
] | null | null | null | sku_db.py | uniqueinx/swenson | 6a70ec1b077aa6f0506a2bb1cadb45422ed85094 | [
"MIT"
] | null | null | null | from models import (
Product,
CoffeeMachine,
CoffeePod,
ModelEnum,
PackSizeEnum,
CoffeeMachineEnum,
CoffeePodEnum,
CoffeePod,
CoffeeFlavorEnum,
)
from typing import List
sku: List[Product] = [
CoffeeMachine(
id="CM001",
product_type=CoffeeMachineEnum.COFFEE_MACHINE_SMALL,
model=ModelEnum.BASE_MODEL,
),
CoffeeMachine(
id="CM002",
product_type=CoffeeMachineEnum.COFFEE_MACHINE_SMALL,
model=ModelEnum.PREMIUM_MODEL,
),
CoffeeMachine(
id="CM003",
product_type=CoffeeMachineEnum.COFFEE_MACHINE_SMALL,
model=ModelEnum.DELUX_MODEL,
water_line_compatible=True,
),
CoffeeMachine(
id="CM101",
product_type=CoffeeMachineEnum.COFFEE_MACHINE_LARGE,
model=ModelEnum.BASE_MODEL,
),
CoffeeMachine(
id="CM102",
product_type=CoffeeMachineEnum.COFFEE_MACHINE_LARGE,
model=ModelEnum.PREMIUM_MODEL,
water_line_compatible=True,
),
CoffeeMachine(
id="CM103",
product_type=CoffeeMachineEnum.COFFEE_MACHINE_LARGE,
model=ModelEnum.DELUX_MODEL,
water_line_compatible=True,
),
CoffeeMachine(
id="EM001",
product_type=CoffeeMachineEnum.ESPRESSO_MACHINE,
model=ModelEnum.BASE_MODEL,
),
CoffeeMachine(
id="EM002",
product_type=CoffeeMachineEnum.ESPRESSO_MACHINE,
model=ModelEnum.PREMIUM_MODEL,
),
CoffeeMachine(
id="EM003",
product_type=CoffeeMachineEnum.ESPRESSO_MACHINE,
model=ModelEnum.DELUX_MODEL,
water_line_compatible=True,
),
CoffeePod(
id="CP001",
product_type=CoffeePodEnum.COFFEE_POD_SMALL,
pack_size=PackSizeEnum.DOZEN_1,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_VANILLA,
),
CoffeePod(
id="CP003",
product_type=CoffeePodEnum.COFFEE_POD_SMALL,
pack_size=PackSizeEnum.DOZEN_3,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_VANILLA,
),
CoffeePod(
id="CP011",
product_type=CoffeePodEnum.COFFEE_POD_SMALL,
pack_size=PackSizeEnum.DOZEN_1,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_CARAMEL,
),
CoffeePod(
id="CP013",
product_type=CoffeePodEnum.COFFEE_POD_SMALL,
pack_size=PackSizeEnum.DOZEN_3,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_CARAMEL,
),
CoffeePod(
id="CP021",
product_type=CoffeePodEnum.COFFEE_POD_SMALL,
pack_size=PackSizeEnum.DOZEN_1,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_PSL,
),
CoffeePod(
id="CP023",
product_type=CoffeePodEnum.COFFEE_POD_SMALL,
pack_size=PackSizeEnum.DOZEN_3,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_PSL,
),
CoffeePod(
id="CP031",
product_type=CoffeePodEnum.COFFEE_POD_SMALL,
pack_size=PackSizeEnum.DOZEN_1,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_MOCHA,
),
CoffeePod(
id="CP033",
product_type=CoffeePodEnum.COFFEE_POD_SMALL,
pack_size=PackSizeEnum.DOZEN_3,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_MOCHA,
),
CoffeePod(
id="CP041",
product_type=CoffeePodEnum.COFFEE_POD_SMALL,
pack_size=PackSizeEnum.DOZEN_1,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_HAZELNUT,
),
CoffeePod(
id="CP043",
product_type=CoffeePodEnum.COFFEE_POD_SMALL,
pack_size=PackSizeEnum.DOZEN_3,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_HAZELNUT,
),
CoffeePod(
id="CP101",
product_type=CoffeePodEnum.COFFEE_POD_LARGE,
pack_size=PackSizeEnum.DOZEN_1,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_VANILLA,
),
CoffeePod(
id="CP103",
product_type=CoffeePodEnum.COFFEE_POD_LARGE,
pack_size=PackSizeEnum.DOZEN_3,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_VANILLA,
),
CoffeePod(
id="CP111",
product_type=CoffeePodEnum.COFFEE_POD_LARGE,
pack_size=PackSizeEnum.DOZEN_1,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_CARAMEL,
),
CoffeePod(
id="CP113",
product_type=CoffeePodEnum.COFFEE_POD_LARGE,
pack_size=PackSizeEnum.DOZEN_3,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_CARAMEL,
),
CoffeePod(
id="CP121",
product_type=CoffeePodEnum.COFFEE_POD_LARGE,
pack_size=PackSizeEnum.DOZEN_1,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_PSL,
),
CoffeePod(
id="CP123",
product_type=CoffeePodEnum.COFFEE_POD_LARGE,
pack_size=PackSizeEnum.DOZEN_3,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_PSL,
),
CoffeePod(
id="CP131",
product_type=CoffeePodEnum.COFFEE_POD_LARGE,
pack_size=PackSizeEnum.DOZEN_1,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_MOCHA,
),
CoffeePod(
id="CP133",
product_type=CoffeePodEnum.COFFEE_POD_LARGE,
pack_size=PackSizeEnum.DOZEN_3,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_MOCHA,
),
CoffeePod(
id="CP141",
product_type=CoffeePodEnum.COFFEE_POD_LARGE,
pack_size=PackSizeEnum.DOZEN_1,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_HAZELNUT,
),
CoffeePod(
id="CP143",
product_type=CoffeePodEnum.COFFEE_POD_LARGE,
pack_size=PackSizeEnum.DOZEN_3,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_HAZELNUT,
),
CoffeePod(
id="EP003",
product_type=CoffeePodEnum.ESPRESSO_POD,
pack_size=PackSizeEnum.DOZEN_3,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_VANILLA,
),
CoffeePod(
id="EP005",
product_type=CoffeePodEnum.ESPRESSO_POD,
pack_size=PackSizeEnum.DOZEN_5,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_VANILLA,
),
CoffeePod(
id="EP007",
product_type=CoffeePodEnum.ESPRESSO_POD,
pack_size=PackSizeEnum.DOZEN_7,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_VANILLA,
),
CoffeePod(
id="EP013",
product_type=CoffeePodEnum.ESPRESSO_POD,
pack_size=PackSizeEnum.DOZEN_3,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_CARAMEL,
),
CoffeePod(
id="EP015",
product_type=CoffeePodEnum.ESPRESSO_POD,
pack_size=PackSizeEnum.DOZEN_5,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_CARAMEL,
),
CoffeePod(
id="EP017",
product_type=CoffeePodEnum.ESPRESSO_POD,
pack_size=PackSizeEnum.DOZEN_7,
coffee_flavor=CoffeeFlavorEnum.COFFEE_FLAVOR_CARAMEL,
),
]
| 30.502262 | 62 | 0.676161 | 663 | 6,741 | 6.508296 | 0.117647 | 0.144612 | 0.144612 | 0.150637 | 0.915875 | 0.915875 | 0.885052 | 0.838702 | 0.738355 | 0.738355 | 0 | 0.025606 | 0.241062 | 6,741 | 220 | 63 | 30.640909 | 0.817826 | 0 | 0 | 0.785388 | 0 | 0 | 0.025961 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.009132 | 0 | 0.009132 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
49779566c90863518dd51baca20211e685cc4861 | 20,094 | py | Python | python/pyspark/pandas/tests/test_window.py | zhangkuantian/Spark | 4f43421a5b33988a841c49d11d8b916e9d4414f4 | [
"BSD-2-Clause",
"Apache-2.0",
"CC0-1.0",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1 | 2022-02-09T19:02:18.000Z | 2022-02-09T19:02:18.000Z | python/pyspark/pandas/tests/test_window.py | zhangkuantian/Spark | 4f43421a5b33988a841c49d11d8b916e9d4414f4 | [
"BSD-2-Clause",
"Apache-2.0",
"CC0-1.0",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 6 | 2018-06-14T11:15:27.000Z | 2019-01-27T12:11:23.000Z | python/pyspark/pandas/tests/test_window.py | zhangkuantian/Spark | 4f43421a5b33988a841c49d11d8b916e9d4414f4 | [
"BSD-2-Clause",
"Apache-2.0",
"CC0-1.0",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1 | 2021-09-19T22:04:32.000Z | 2021-09-19T22:04:32.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import inspect
from pyspark import pandas as ps
from pyspark.pandas.exceptions import PandasNotImplementedError
from pyspark.pandas.missing.window import (
MissingPandasLikeExpanding,
MissingPandasLikeRolling,
MissingPandasLikeExpandingGroupby,
MissingPandasLikeRollingGroupby,
MissingPandasLikeExponentialMoving,
MissingPandasLikeExponentialMovingGroupby,
)
from pyspark.testing.pandasutils import PandasOnSparkTestCase, TestUtils
class ExpandingRollingTest(PandasOnSparkTestCase, TestUtils):
def test_missing(self):
psdf = ps.DataFrame({"a": [1, 2, 3, 4, 5, 6, 7, 8, 9]})
# Expanding functions
missing_functions = inspect.getmembers(MissingPandasLikeExpanding, inspect.isfunction)
unsupported_functions = [
name for (name, type_) in missing_functions if type_.__name__ == "unsupported_function"
]
for name in unsupported_functions:
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.expanding(1), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.a.expanding(1), name)() # Series
deprecated_functions = [
name for (name, type_) in missing_functions if type_.__name__ == "deprecated_function"
]
for name in deprecated_functions:
with self.assertRaisesRegex(
PandasNotImplementedError, "method.*Expanding.*{}.*is deprecated".format(name)
):
getattr(psdf.expanding(1), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError, "method.*Expanding.*{}.*is deprecated".format(name)
):
getattr(psdf.a.expanding(1), name)() # Series
# Rolling functions
missing_functions = inspect.getmembers(MissingPandasLikeRolling, inspect.isfunction)
unsupported_functions = [
name for (name, type_) in missing_functions if type_.__name__ == "unsupported_function"
]
for name in unsupported_functions:
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.rolling(1), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.a.rolling(1), name)() # Series
deprecated_functions = [
name for (name, type_) in missing_functions if type_.__name__ == "deprecated_function"
]
for name in deprecated_functions:
with self.assertRaisesRegex(
PandasNotImplementedError, "method.*Rolling.*{}.*is deprecated".format(name)
):
getattr(psdf.rolling(1), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError, "method.*Rolling.*{}.*is deprecated".format(name)
):
getattr(psdf.a.rolling(1), name)() # Series
# ExponentialMoving functions
missing_functions = inspect.getmembers(
MissingPandasLikeExponentialMoving, inspect.isfunction
)
unsupported_functions = [
name for (name, type_) in missing_functions if type_.__name__ == "unsupported_function"
]
for name in unsupported_functions:
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*ExponentialMoving.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.ewm(com=0.5), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*ExponentialMoving.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.a.ewm(com=0.5), name)() # Series
deprecated_functions = [
name for (name, type_) in missing_functions if type_.__name__ == "deprecated_function"
]
for name in deprecated_functions:
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*ExponentialMoving.*{}.*is deprecated".format(name),
):
getattr(psdf.ewm(com=0.5), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*ExponentialMoving.*{}.*is deprecated".format(name),
):
getattr(psdf.a.ewm(com=0.5), name)() # Series
# Expanding properties
missing_properties = inspect.getmembers(
MissingPandasLikeExpanding, lambda o: isinstance(o, property)
)
unsupported_properties = [
name
for (name, type_) in missing_properties
if type_.fget.__name__ == "unsupported_property"
]
for name in unsupported_properties:
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.expanding(1), name) # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.a.expanding(1), name) # Series
deprecated_properties = [
name
for (name, type_) in missing_properties
if type_.fget.__name__ == "deprecated_property"
]
for name in deprecated_properties:
with self.assertRaisesRegex(
PandasNotImplementedError, "property.*Expanding.*{}.*is deprecated".format(name)
):
getattr(psdf.expanding(1), name) # Frame
with self.assertRaisesRegex(
PandasNotImplementedError, "property.*Expanding.*{}.*is deprecated".format(name)
):
getattr(psdf.a.expanding(1), name) # Series
# Rolling properties
missing_properties = inspect.getmembers(
MissingPandasLikeRolling, lambda o: isinstance(o, property)
)
unsupported_properties = [
name
for (name, type_) in missing_properties
if type_.fget.__name__ == "unsupported_property"
]
for name in unsupported_properties:
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.rolling(1), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.a.rolling(1), name)() # Series
deprecated_properties = [
name
for (name, type_) in missing_properties
if type_.fget.__name__ == "deprecated_property"
]
for name in deprecated_properties:
with self.assertRaisesRegex(
PandasNotImplementedError, "property.*Rolling.*{}.*is deprecated".format(name)
):
getattr(psdf.rolling(1), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError, "property.*Rolling.*{}.*is deprecated".format(name)
):
getattr(psdf.a.rolling(1), name)() # Series
# ExponentialMoving properties
missing_properties = inspect.getmembers(
MissingPandasLikeExponentialMoving, lambda o: isinstance(o, property)
)
unsupported_properties = [
name
for (name, type_) in missing_properties
if type_.fget.__name__ == "unsupported_property"
]
for name in unsupported_properties:
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*ExponentialMoving.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.ewm(com=0.5), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*ExponentialMoving.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.a.ewm(com=0.5), name)() # Series
deprecated_properties = [
name
for (name, type_) in missing_properties
if type_.fget.__name__ == "deprecated_property"
]
for name in deprecated_properties:
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*ExponentialMoving.*{}.*is deprecated".format(name),
):
getattr(psdf.ewm(com=0.5), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*ExponentialMoving.*{}.*is deprecated".format(name),
):
getattr(psdf.a.ewm(com=0.5), name)() # Series
def test_missing_groupby(self):
psdf = ps.DataFrame({"a": [1, 2, 3, 4, 5, 6, 7, 8, 9]})
# Expanding functions
missing_functions = inspect.getmembers(
MissingPandasLikeExpandingGroupby, inspect.isfunction
)
unsupported_functions = [
name for (name, type_) in missing_functions if type_.__name__ == "unsupported_function"
]
for name in unsupported_functions:
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.groupby("a").expanding(1), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.a.groupby(psdf.a).expanding(1), name)() # Series
deprecated_functions = [
name for (name, type_) in missing_functions if type_.__name__ == "deprecated_function"
]
for name in deprecated_functions:
with self.assertRaisesRegex(
PandasNotImplementedError, "method.*Expanding.*{}.*is deprecated".format(name)
):
getattr(psdf.groupby("a").expanding(1), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError, "method.*Expanding.*{}.*is deprecated".format(name)
):
getattr(psdf.a.groupby(psdf.a).expanding(1), name)() # Series
# Rolling functions
missing_functions = inspect.getmembers(MissingPandasLikeRollingGroupby, inspect.isfunction)
unsupported_functions = [
name for (name, type_) in missing_functions if type_.__name__ == "unsupported_function"
]
for name in unsupported_functions:
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.groupby("a").rolling(1), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.a.groupby(psdf.a).rolling(1), name)() # Series
deprecated_functions = [
name for (name, type_) in missing_functions if type_.__name__ == "deprecated_function"
]
for name in deprecated_functions:
with self.assertRaisesRegex(
PandasNotImplementedError, "method.*Rolling.*{}.*is deprecated".format(name)
):
getattr(psdf.rolling(1), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError, "method.*Rolling.*{}.*is deprecated".format(name)
):
getattr(psdf.a.rolling(1), name)() # Series
# ExponentialMoving functions
missing_functions = inspect.getmembers(
MissingPandasLikeExponentialMovingGroupby, inspect.isfunction
)
unsupported_functions = [
name for (name, type_) in missing_functions if type_.__name__ == "unsupported_function"
]
for name in unsupported_functions:
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*ExponentialMoving.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.groupby("a").ewm(com=0.5), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*ExponentialMoving.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.a.groupby(psdf.a).ewm(com=0.5), name)() # Series
deprecated_functions = [
name for (name, type_) in missing_functions if type_.__name__ == "deprecated_function"
]
for name in deprecated_functions:
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*ExponentialMoving.*{}.*is deprecated".format(name),
):
getattr(psdf.ewm(com=0.5), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"method.*ExponentialMoving.*{}.*is deprecated".format(name),
):
getattr(psdf.a.ewm(com=0.5), name)() # Series
# Expanding properties
missing_properties = inspect.getmembers(
MissingPandasLikeExpandingGroupby, lambda o: isinstance(o, property)
)
unsupported_properties = [
name
for (name, type_) in missing_properties
if type_.fget.__name__ == "unsupported_property"
]
for name in unsupported_properties:
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.groupby("a").expanding(1), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.a.groupby(psdf.a).expanding(1), name)() # Series
deprecated_properties = [
name
for (name, type_) in missing_properties
if type_.fget.__name__ == "deprecated_property"
]
for name in deprecated_properties:
with self.assertRaisesRegex(
PandasNotImplementedError, "property.*Expanding.*{}.*is deprecated".format(name)
):
getattr(psdf.expanding(1), name) # Frame
with self.assertRaisesRegex(
PandasNotImplementedError, "property.*Expanding.*{}.*is deprecated".format(name)
):
getattr(psdf.a.expanding(1), name) # Series
# Rolling properties
missing_properties = inspect.getmembers(
MissingPandasLikeRollingGroupby, lambda o: isinstance(o, property)
)
unsupported_properties = [
name
for (name, type_) in missing_properties
if type_.fget.__name__ == "unsupported_property"
]
for name in unsupported_properties:
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.groupby("a").rolling(1), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.a.groupby(psdf.a).rolling(1), name)() # Series
deprecated_properties = [
name
for (name, type_) in missing_properties
if type_.fget.__name__ == "deprecated_property"
]
for name in deprecated_properties:
with self.assertRaisesRegex(
PandasNotImplementedError, "property.*Rolling.*{}.*is deprecated".format(name)
):
getattr(psdf.rolling(1), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError, "property.*Rolling.*{}.*is deprecated".format(name)
):
getattr(psdf.a.rolling(1), name)() # Series
# ExponentialMoving properties
missing_properties = inspect.getmembers(
MissingPandasLikeExponentialMovingGroupby, lambda o: isinstance(o, property)
)
unsupported_properties = [
name
for (name, type_) in missing_properties
if type_.fget.__name__ == "unsupported_property"
]
for name in unsupported_properties:
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*ExponentialMoving.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.groupby("a").ewm(com=0.5), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*ExponentialMoving.*{}.*not implemented( yet\\.|\\. .+)".format(name),
):
getattr(psdf.a.groupby(psdf.a).ewm(com=0.5), name)() # Series
deprecated_properties = [
name
for (name, type_) in missing_properties
if type_.fget.__name__ == "deprecated_property"
]
for name in deprecated_properties:
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*ExponentialMoving.*{}.*is deprecated".format(name),
):
getattr(psdf.ewm(com=0.5), name)() # Frame
with self.assertRaisesRegex(
PandasNotImplementedError,
"property.*ExponentialMoving.*{}.*is deprecated".format(name),
):
getattr(psdf.a.ewm(com=0.5), name)() # Series
if __name__ == "__main__":
import unittest
from pyspark.pandas.tests.test_window import * # noqa: F401
try:
import xmlrunner # type: ignore[import]
testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2)
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
| 43.493506 | 99 | 0.579725 | 1,708 | 20,094 | 6.665691 | 0.088993 | 0.029513 | 0.105402 | 0.210804 | 0.850242 | 0.850242 | 0.850242 | 0.850242 | 0.850242 | 0.850242 | 0 | 0.00655 | 0.3086 | 20,094 | 461 | 100 | 43.587852 | 0.812927 | 0.067931 | 0 | 0.8275 | 0 | 0 | 0.150563 | 0.073351 | 0 | 0 | 0 | 0 | 0.12 | 1 | 0.005 | false | 0 | 0.0225 | 0 | 0.03 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
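
The tests above key off placeholder attributes whose __name__ is literally "unsupported_function" or "deprecated_function"; a minimal self-contained sketch of how such a placeholder factory can be built (the real pyspark.pandas.missing module raises its own PandasNotImplementedError and may differ in detail):

def unsupported_function(class_name, method_name):
    def unsupported_function(*args, **kwargs):
        # plain NotImplementedError keeps this sketch self-contained
        raise NotImplementedError(
            "method {}.{}() is not implemented yet.".format(class_name, method_name))
    return unsupported_function  # inner def keeps __name__ == 'unsupported_function'

class MissingLikeExample:
    quantile = unsupported_function("Expanding", "quantile")

assert MissingLikeExample.quantile.__name__ == "unsupported_function"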
4983d8e4e56f47aee04c1a05f492b8711677d924 | 126 | py | Python | src/test/data/pa3/AdditionalTestCase/UnitTest/Arithmetic_MOD.py | Leo-Enrique-Wu/chocopy_compiler_code_generation | 4606be0531b3de77411572aae98f73169f46b3b9 | [
"BSD-2-Clause"
] | null | null | null | src/test/data/pa3/AdditionalTestCase/UnitTest/Arithmetic_MOD.py | Leo-Enrique-Wu/chocopy_compiler_code_generation | 4606be0531b3de77411572aae98f73169f46b3b9 | [
"BSD-2-Clause"
] | null | null | null | src/test/data/pa3/AdditionalTestCase/UnitTest/Arithmetic_MOD.py | Leo-Enrique-Wu/chocopy_compiler_code_generation | 4606be0531b3de77411572aae98f73169f46b3b9 | [
"BSD-2-Clause"
] | null | null | null | x:int = 2147483647
print(25%4)
print(-25%-4)
print(-250%4)
print(250%-60)
print(x%-22)
print(-x%-33)
print(x%25)
print(-x%15)
| 12.6 | 18 | 0.642857 | 27 | 126 | 3 | 0.407407 | 0.296296 | 0.197531 | 0.320988 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.286957 | 0.087302 | 126 | 9 | 19 | 14 | 0.417391 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0.888889 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 9 |
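
The ChocoPy test above exercises floored modulo, which matches Python 3 semantics (the result takes the sign of the divisor), so the expected outputs can be checked directly in Python:

x = 2147483647
assert 25 % 4 == 1
assert -25 % -4 == -1
assert -250 % 4 == 2
assert 250 % -60 == -50
assert x % -22 == -21
assert -x % -33 == -1
assert x % 25 == 22
assert -x % 15 == 8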
b8cb55d0b3fa8d99784768c1bf5792890dce667e | 310 | py | Python | bobstack/tests/test_sipentity_tls_sipStatelessProxy.py | bobjects/BobStack | c177b286075044832f44baf9ace201780c8b4320 | [
"Apache-2.0"
] | null | null | null | bobstack/tests/test_sipentity_tls_sipStatelessProxy.py | bobjects/BobStack | c177b286075044832f44baf9ace201780c8b4320 | [
"Apache-2.0"
] | null | null | null | bobstack/tests/test_sipentity_tls_sipStatelessProxy.py | bobjects/BobStack | c177b286075044832f44baf9ace201780c8b4320 | [
"Apache-2.0"
] | null | null | null | from abstractStatelessProxyTestCase import AbstractStatelessProxyTestCase
from ..sipmessaging import SIPURI
from ..siptransport import TLSSIPTransport
from ..sipentity import SIPStatelessProxy
class TestStatelessProxyWithTLSTransport(AbstractStatelessProxyTestCase):
# TODO - need to implement!
pass
| 31 | 73 | 0.848387 | 24 | 310 | 10.958333 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.112903 | 310 | 9 | 74 | 34.444444 | 0.956364 | 0.080645 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 0 | 1 | 0 | true | 0.166667 | 0.666667 | 0 | 0.833333 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 7 |
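
A hypothetical sketch of how the TODO above might be filled in; the method and constructor calls below are illustrative only and not verified against the BobStack API, so the whole body is left commented out:

# class TestStatelessProxyWithTLSTransport(AbstractStatelessProxyTestCase):
#     def setUp(self):
#         self.transport = TLSSIPTransport()   # hypothetical no-arg constructor
#         self.proxy = SIPStatelessProxy()     # hypothetical no-arg constructor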
b8cf0b7bf20fb4d0b3f807bcafb0f02cc568b33a | 145 | py | Python | generated-libraries/python/netapp/vscan/policy_name.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | 2 | 2017-03-28T15:31:26.000Z | 2018-08-16T22:15:18.000Z | generated-libraries/python/netapp/vscan/policy_name.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | generated-libraries/python/netapp/vscan/policy_name.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | class PolicyName(basestring):
"""
Policy name
"""
@staticmethod
def get_api_name():
return "policy-name"
| 14.5 | 30 | 0.537931 | 13 | 145 | 5.846154 | 0.769231 | 0.263158 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.351724 | 145 | 9 | 31 | 16.111111 | 0.808511 | 0.075862 | 0 | 0 | 0 | 0 | 0.09322 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0 | 0.25 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
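
A short usage sketch for the class above (Python 2, since it subclasses basestring):

name = PolicyName("default-policy")
assert isinstance(name, basestring)                 # behaves as a plain string
assert PolicyName.get_api_name() == "policy-name"   # API field name, not the value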
6204df34a27ee138063f6e07d017d5a77965e9d0 | 9,182 | py | Python | src/crl/remotescript/FP.py | beleki/crl-remotescript | b7222e2eb0fcd1e03b8ed6052c053bcc57a3649e | [
"BSD-3-Clause"
] | null | null | null | src/crl/remotescript/FP.py | beleki/crl-remotescript | b7222e2eb0fcd1e03b8ed6052c053bcc57a3649e | [
"BSD-3-Clause"
] | null | null | null | src/crl/remotescript/FP.py | beleki/crl-remotescript | b7222e2eb0fcd1e03b8ed6052c053bcc57a3649e | [
"BSD-3-Clause"
] | null | null | null | from crl.remotescript.RemoteScript import RemoteScript
from crl.remotescript.fpengine import FPEngine
__copyright__ = 'Copyright (C) 2019, Nokia'
__version__ = '__VERSION__'
class FP(RemoteScript):
"""
Multi-node specific extensions to [crl.remotescript.RemoteScript.html|RemoteScript]
library.
Keywords defined in this library are: `Execute Command In Node`,
`Execute Background Command In Node`, `Execute Script In Node`,
`Execute Background Script In Node`, `Copy File To Node`, `Copy File From Node`
"""
def __init__(self):
self._engine = FPEngine()
def node(self, node, command, target='default', exec_id='foreground', timeout=None):
"""*DEPRECATED* Keyword has been renamed to `Execute Command In Node`."""
return self._engine.node_execute(node, command, target, exec_id, timeout)
def execute_command_in_node(self, node, command, target='default',
exec_id='foreground', timeout=None):
"""
Execute command in the node accessed through a primary target.
Like keyword `Execute Command In Target`, but instead of
executing the command in the primary target executes
the command in named node. This keyword first opens connection
to the primary target using connection protocol specified with
`Set Target` keyword. SSH connection is opened from primary
target to the node to execute the command.
*Arguments:*\n
_node_: Target node in which to execute the command.\n
_command_: Bash shell command to execute in target node.\n
_target_: Primary target through which the target node is accessed.\n
_exec_id_: Connection ID to use.\n
_timeout_: Timeout for command in seconds.
*Returns:*\n
[crl.remotescript.result.Result.html|Result] object.
"""
return self._engine.node_execute(node, command, target, exec_id, timeout)
def node_background(self, node, command, target='default', exec_id='background'):
"""*DEPRECATED* Keyword has been renamed to `Execute Background Command In Node`."""
self._engine.node_execute_background(node, command, target, exec_id)
def execute_background_command_in_node(self, node, command, target='default',
exec_id='background'):
"""
Start executing command in the node accessed through a primary target.
Like keyword `Execute Background Command In Target`, but
instead of executing the command in the primary target
executes the command in named node. This keyword first opens
connection to the primary target using connection protocol
specified with `Set Target` keyword. SSH connection is opened
from primary target to the node to execute the command.
This keyword returns immediately and the command is left
running in the background. See `Wait Background Execution` on
how to read command output and `Kill Background Execution` on
how to interrupt the execution.
*Arguments:*\n
_node_: Target node in which to execute the command.\n
_command_: Bash shell command to execute in target node.\n
_target_: Primary target through which the target node is accessed.\n
_exec_id_: Connection ID to use.\n
_timeout_: Timeout for command in seconds.\n
*Returns:*\n
Nothing.\n
"""
self._engine.node_execute_background(node, command, target, exec_id)
def node_script(self, node, file, target='default', exec_id='foreground', timeout=None):
"""*DEPRECATED* Keyword has been renamed to `Execute Script In Node`."""
return self._engine.node_execute_script(node, file, target, exec_id, timeout)
def execute_script_in_node(self, node, file, target='default', exec_id='foreground', timeout=None):
"""
Execute script file in the node accessed through a primary target.
Like keyword `Execute Script In Target`, but instead of
executing the script file in the primary target executes it in
named node. This keyword first copies the script file to the
        primary target using the protocol specified with `Set
Target` keyword and from the primary target the script file is
copied to the target node using scp command. The script file
is then executed in the node. This call will block until the command
has been executed.
*Arguments:*\n
_node_: Target node in which to execute the script file.\n
_file_: Path to file to execute in the local host (example: my_script.sh).\n
_target_: Name of the target through which to open the connection to the node.\n
_exec_id_: Connection ID to use.\n
_timeout_: Timeout for command in seconds.\n
*Returns:*\n
[crl.remotescript.result.Result.html|Result] object.\n
"""
return self._engine.node_execute_script(node, file, target, exec_id, timeout)
def node_script_background(self, node, file, target='default', exec_id='background'):
"""*DEPRECATED* Keyword has been renamed to `Execute Background Script In Node`."""
self._engine.node_execute_script_background(node, file, target, exec_id)
def execute_background_script_in_node(self, node, file, target='default', exec_id='background'):
"""
Start executing script file in the node accessed through a primary target.
Like keyword `Execute Background Script In Target`, but instead of
executing the script file in the primary target executes it in
named node. This keyword first copies the script file to the
        primary target node using the protocol specified with `Set
Target` keyword and from the primary node the script file is
copied to the target node using scp command. The script file
is then executed in the node.
This keyword returns immediately and the script is left
running in the background. See `Wait Background Execution` on
how to read command output and `Kill Background Execution` on
        how to interrupt the execution.
*Arguments:*\n
_node_: Target node in which to execute the script file.\n
_file_: Path to file to execute in the local host (example: my_script.sh).\n
_target_: Name of the target through which to open the connection to the node.\n
_exec_id_: Connection ID to use.\n
_timeout_: Timeout for command in seconds.\n
*Returns:*\n
Nothing.\n
"""
self._engine.node_execute_script_background(node, file, target, exec_id)
def node_put_file(self, node, source_file, destination_dir='.', mode='0755',
target='default', exec_id='foreground', timeout=None):
"""*DEPRECATED* Keyword has been renamed to `Copy File To Node`."""
return self._engine.node_put_file(
node, source_file, destination_dir, mode, target, exec_id, timeout)
def copy_file_to_node(self, node, source_file, destination_dir='.', mode='0755',
target='default', exec_id='foreground', timeout=None):
"""
Copy file from local host through primary target to the target node.
*Arguments:*\n
_node_: Target node where to copy the file to.\n
_source_file_: Local source file.\n
_destination_dir_: Remote destination directory in the node. The directory is \
created if missing.\n
_mode_: Access mode to set to the file in the target.\n
_target_: Name of the target through which to copy the file to the node.\n
_exec_id_: Connection ID to use.\n
_timeout_: Timeout in seconds.\n
*Returns:*\n
[crl.remotescript.result.Result.html|Result] object.\n
"""
return self._engine.node_put_file(
node, source_file, destination_dir, mode, target, exec_id, timeout)
def node_get_file(self, node, source_file, destination='.',
target='default', exec_id='foreground', timeout=None):
"""*DEPRECATED* Keyword has been renamed to `Copy File From Node`."""
return self._engine.node_get_file(node, source_file, destination, target, exec_id, timeout)
def copy_file_from_node(self, node, source_file, destination='.',
target='default', exec_id='foreground', timeout=None):
"""
Copy file from the node through primary target to local host.
*Arguments:*\n
_node_: Target node where to copy the file from.\n
_source_file_: Source file in the node.\n
_destination_: Destination directory or file in local host.\n
_target_: Name of the target through which to copy the file from the node.\n
_exec_id_: Connection ID to use.\n
_timeout_: Timeout in seconds.\n
*Returns:*\n
[crl.remotescript.result.Result.html|Result] object.\n
"""
return self._engine.node_get_file(node, source_file, destination, target, exec_id, timeout)
| 47.087179 | 103 | 0.673274 | 1,229 | 9,182 | 4.872254 | 0.105777 | 0.03006 | 0.034068 | 0.038076 | 0.866065 | 0.85187 | 0.823981 | 0.814128 | 0.803941 | 0.795424 | 0 | 0.001743 | 0.250381 | 9,182 | 194 | 104 | 47.329897 | 0.868226 | 0.575474 | 0 | 0.461538 | 0 | 0 | 0.085308 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.051282 | 0 | 0.615385 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
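
A minimal Python-side sketch of driving the keywords above outside Robot Framework; the set_target call and its arguments are assumed from the `Set Target` keyword referenced in the docstrings, not verified against RemoteScript:

from crl.remotescript.FP import FP

fp = FP()
fp.set_target('10.0.0.1', 'username', 'password')          # hypothetical signature
result = fp.execute_command_in_node('node-1', 'uname -a')  # documented keyword above
print(result)  # a crl.remotescript.result.Result object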
621b73fdb6843746fd3faad67baaacf5b65b58f8 | 39,737 | py | Python | sdk/python/pulumi_rancher2/cluster_alert_rule.py | pulumi/pulumi-rancher2 | 7a98af8cf598b711084a7f46c0fe71b43ed7a8ac | [
"ECL-2.0",
"Apache-2.0"
] | 3 | 2020-03-23T15:59:11.000Z | 2021-01-29T00:37:32.000Z | sdk/python/pulumi_rancher2/cluster_alert_rule.py | pulumi/pulumi-rancher2 | 7a98af8cf598b711084a7f46c0fe71b43ed7a8ac | [
"ECL-2.0",
"Apache-2.0"
] | 76 | 2020-01-16T20:00:25.000Z | 2022-03-31T20:30:08.000Z | sdk/python/pulumi_rancher2/cluster_alert_rule.py | pulumi/pulumi-rancher2 | 7a98af8cf598b711084a7f46c0fe71b43ed7a8ac | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2020-03-27T17:39:59.000Z | 2020-11-24T23:09:24.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ClusterAlertRuleArgs', 'ClusterAlertRule']
@pulumi.input_type
class ClusterAlertRuleArgs:
def __init__(__self__, *,
cluster_id: pulumi.Input[str],
group_id: pulumi.Input[str],
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
event_rule: Optional[pulumi.Input['ClusterAlertRuleEventRuleArgs']] = None,
group_interval_seconds: Optional[pulumi.Input[int]] = None,
group_wait_seconds: Optional[pulumi.Input[int]] = None,
inherited: Optional[pulumi.Input[bool]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
metric_rule: Optional[pulumi.Input['ClusterAlertRuleMetricRuleArgs']] = None,
name: Optional[pulumi.Input[str]] = None,
node_rule: Optional[pulumi.Input['ClusterAlertRuleNodeRuleArgs']] = None,
repeat_interval_seconds: Optional[pulumi.Input[int]] = None,
severity: Optional[pulumi.Input[str]] = None,
system_service_rule: Optional[pulumi.Input['ClusterAlertRuleSystemServiceRuleArgs']] = None):
"""
The set of arguments for constructing a ClusterAlertRule resource.
:param pulumi.Input[str] cluster_id: The cluster id where create cluster alert rule (string)
:param pulumi.Input[str] group_id: The cluster alert rule alert group ID (string)
:param pulumi.Input[Mapping[str, Any]] annotations: The cluster alert rule annotations (map)
        :param pulumi.Input['ClusterAlertRuleEventRuleArgs'] event_rule: The cluster alert rule event rule. ConflictsWith: `"metric_rule", "node_rule", "system_service_rule"` (list Maxitems:1)
:param pulumi.Input[int] group_interval_seconds: The cluster alert rule group interval seconds. Default: `180` (int)
:param pulumi.Input[int] group_wait_seconds: The cluster alert rule group wait seconds. Default: `180` (int)
:param pulumi.Input[bool] inherited: The cluster alert rule inherited. Default: `true` (bool)
:param pulumi.Input[Mapping[str, Any]] labels: The cluster alert rule labels (map)
:param pulumi.Input['ClusterAlertRuleMetricRuleArgs'] metric_rule: The cluster alert rule metric rule. ConflictsWith: `"event_rule", "node_rule", "system_service_rule"`` (list Maxitems:1)
:param pulumi.Input[str] name: The cluster alert rule name (string)
:param pulumi.Input['ClusterAlertRuleNodeRuleArgs'] node_rule: The cluster alert rule node rule. ConflictsWith: `"event_rule", "metric_rule", "system_service_rule"`` (list Maxitems:1)
:param pulumi.Input[int] repeat_interval_seconds: The cluster alert rule wait seconds. Default: `3600` (int)
:param pulumi.Input[str] severity: The cluster alert rule severity. Supported values : `"critical" | "info" | "warning"`. Default: `critical` (string)
:param pulumi.Input['ClusterAlertRuleSystemServiceRuleArgs'] system_service_rule: The cluster alert rule system service rule. ConflictsWith: `"event_rule", "metric_rule", "node_rule"` (list Maxitems:1)
"""
pulumi.set(__self__, "cluster_id", cluster_id)
pulumi.set(__self__, "group_id", group_id)
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if event_rule is not None:
pulumi.set(__self__, "event_rule", event_rule)
if group_interval_seconds is not None:
pulumi.set(__self__, "group_interval_seconds", group_interval_seconds)
if group_wait_seconds is not None:
pulumi.set(__self__, "group_wait_seconds", group_wait_seconds)
if inherited is not None:
pulumi.set(__self__, "inherited", inherited)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if metric_rule is not None:
pulumi.set(__self__, "metric_rule", metric_rule)
if name is not None:
pulumi.set(__self__, "name", name)
if node_rule is not None:
pulumi.set(__self__, "node_rule", node_rule)
if repeat_interval_seconds is not None:
pulumi.set(__self__, "repeat_interval_seconds", repeat_interval_seconds)
if severity is not None:
pulumi.set(__self__, "severity", severity)
if system_service_rule is not None:
pulumi.set(__self__, "system_service_rule", system_service_rule)
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> pulumi.Input[str]:
"""
The cluster ID where the cluster alert rule is created (string)
"""
return pulumi.get(self, "cluster_id")
@cluster_id.setter
def cluster_id(self, value: pulumi.Input[str]):
pulumi.set(self, "cluster_id", value)
@property
@pulumi.getter(name="groupId")
def group_id(self) -> pulumi.Input[str]:
"""
The cluster alert rule alert group ID (string)
"""
return pulumi.get(self, "group_id")
@group_id.setter
def group_id(self, value: pulumi.Input[str]):
pulumi.set(self, "group_id", value)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
The cluster alert rule annotations (map)
"""
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter(name="eventRule")
def event_rule(self) -> Optional[pulumi.Input['ClusterAlertRuleEventRuleArgs']]:
"""
The cluster alert rule event rule. ConflictsWith: `"metric_rule", "node_rule", "system_service_rule"` (list Maxitems:1)
"""
return pulumi.get(self, "event_rule")
@event_rule.setter
def event_rule(self, value: Optional[pulumi.Input['ClusterAlertRuleEventRuleArgs']]):
pulumi.set(self, "event_rule", value)
@property
@pulumi.getter(name="groupIntervalSeconds")
def group_interval_seconds(self) -> Optional[pulumi.Input[int]]:
"""
The cluster alert rule group interval seconds. Default: `180` (int)
"""
return pulumi.get(self, "group_interval_seconds")
@group_interval_seconds.setter
def group_interval_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "group_interval_seconds", value)
@property
@pulumi.getter(name="groupWaitSeconds")
def group_wait_seconds(self) -> Optional[pulumi.Input[int]]:
"""
The cluster alert rule group wait seconds. Default: `180` (int)
"""
return pulumi.get(self, "group_wait_seconds")
@group_wait_seconds.setter
def group_wait_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "group_wait_seconds", value)
@property
@pulumi.getter
def inherited(self) -> Optional[pulumi.Input[bool]]:
"""
The cluster alert rule inherited. Default: `true` (bool)
"""
return pulumi.get(self, "inherited")
@inherited.setter
def inherited(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "inherited", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
The cluster alert rule labels (map)
"""
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter(name="metricRule")
def metric_rule(self) -> Optional[pulumi.Input['ClusterAlertRuleMetricRuleArgs']]:
"""
The cluster alert rule metric rule. ConflictsWith: `"event_rule", "node_rule", "system_service_rule"` (list Maxitems:1)
"""
return pulumi.get(self, "metric_rule")
@metric_rule.setter
def metric_rule(self, value: Optional[pulumi.Input['ClusterAlertRuleMetricRuleArgs']]):
pulumi.set(self, "metric_rule", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The cluster alert rule name (string)
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="nodeRule")
def node_rule(self) -> Optional[pulumi.Input['ClusterAlertRuleNodeRuleArgs']]:
"""
The cluster alert rule node rule. ConflictsWith: `"event_rule", "metric_rule", "system_service_rule"` (list Maxitems:1)
"""
return pulumi.get(self, "node_rule")
@node_rule.setter
def node_rule(self, value: Optional[pulumi.Input['ClusterAlertRuleNodeRuleArgs']]):
pulumi.set(self, "node_rule", value)
@property
@pulumi.getter(name="repeatIntervalSeconds")
def repeat_interval_seconds(self) -> Optional[pulumi.Input[int]]:
"""
The cluster alert rule repeat interval seconds. Default: `3600` (int)
"""
return pulumi.get(self, "repeat_interval_seconds")
@repeat_interval_seconds.setter
def repeat_interval_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "repeat_interval_seconds", value)
@property
@pulumi.getter
def severity(self) -> Optional[pulumi.Input[str]]:
"""
The cluster alert rule severity. Supported values: `"critical" | "info" | "warning"`. Default: `critical` (string)
"""
return pulumi.get(self, "severity")
@severity.setter
def severity(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "severity", value)
@property
@pulumi.getter(name="systemServiceRule")
def system_service_rule(self) -> Optional[pulumi.Input['ClusterAlertRuleSystemServiceRuleArgs']]:
"""
The cluster alert rule system service rule. ConflictsWith: `"event_rule", "metric_rule", "node_rule"` (list Maxitems:1)
"""
return pulumi.get(self, "system_service_rule")
@system_service_rule.setter
def system_service_rule(self, value: Optional[pulumi.Input['ClusterAlertRuleSystemServiceRuleArgs']]):
pulumi.set(self, "system_service_rule", value)
@pulumi.input_type
class _ClusterAlertRuleState:
def __init__(__self__, *,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
event_rule: Optional[pulumi.Input['ClusterAlertRuleEventRuleArgs']] = None,
group_id: Optional[pulumi.Input[str]] = None,
group_interval_seconds: Optional[pulumi.Input[int]] = None,
group_wait_seconds: Optional[pulumi.Input[int]] = None,
inherited: Optional[pulumi.Input[bool]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
metric_rule: Optional[pulumi.Input['ClusterAlertRuleMetricRuleArgs']] = None,
name: Optional[pulumi.Input[str]] = None,
node_rule: Optional[pulumi.Input['ClusterAlertRuleNodeRuleArgs']] = None,
repeat_interval_seconds: Optional[pulumi.Input[int]] = None,
severity: Optional[pulumi.Input[str]] = None,
system_service_rule: Optional[pulumi.Input['ClusterAlertRuleSystemServiceRuleArgs']] = None):
"""
Input properties used for looking up and filtering ClusterAlertRule resources.
:param pulumi.Input[Mapping[str, Any]] annotations: The cluster alert rule annotations (map)
:param pulumi.Input[str] cluster_id: The cluster ID where the cluster alert rule is created (string)
:param pulumi.Input['ClusterAlertRuleEventRuleArgs'] event_rule: The cluster alert rule event rule. ConflictsWith: `"metric_rule", "node_rule", "system_service_rule"` (list Maxitems:1)
:param pulumi.Input[str] group_id: The cluster alert rule alert group ID (string)
:param pulumi.Input[int] group_interval_seconds: The cluster alert rule group interval seconds. Default: `180` (int)
:param pulumi.Input[int] group_wait_seconds: The cluster alert rule group wait seconds. Default: `180` (int)
:param pulumi.Input[bool] inherited: The cluster alert rule inherited. Default: `true` (bool)
:param pulumi.Input[Mapping[str, Any]] labels: The cluster alert rule labels (map)
:param pulumi.Input['ClusterAlertRuleMetricRuleArgs'] metric_rule: The cluster alert rule metric rule. ConflictsWith: `"event_rule", "node_rule", "system_service_rule"` (list Maxitems:1)
:param pulumi.Input[str] name: The cluster alert rule name (string)
:param pulumi.Input['ClusterAlertRuleNodeRuleArgs'] node_rule: The cluster alert rule node rule. ConflictsWith: `"event_rule", "metric_rule", "system_service_rule"` (list Maxitems:1)
:param pulumi.Input[int] repeat_interval_seconds: The cluster alert rule repeat interval seconds. Default: `3600` (int)
:param pulumi.Input[str] severity: The cluster alert rule severity. Supported values: `"critical" | "info" | "warning"`. Default: `critical` (string)
:param pulumi.Input['ClusterAlertRuleSystemServiceRuleArgs'] system_service_rule: The cluster alert rule system service rule. ConflictsWith: `"event_rule", "metric_rule", "node_rule"` (list Maxitems:1)
"""
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if cluster_id is not None:
pulumi.set(__self__, "cluster_id", cluster_id)
if event_rule is not None:
pulumi.set(__self__, "event_rule", event_rule)
if group_id is not None:
pulumi.set(__self__, "group_id", group_id)
if group_interval_seconds is not None:
pulumi.set(__self__, "group_interval_seconds", group_interval_seconds)
if group_wait_seconds is not None:
pulumi.set(__self__, "group_wait_seconds", group_wait_seconds)
if inherited is not None:
pulumi.set(__self__, "inherited", inherited)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if metric_rule is not None:
pulumi.set(__self__, "metric_rule", metric_rule)
if name is not None:
pulumi.set(__self__, "name", name)
if node_rule is not None:
pulumi.set(__self__, "node_rule", node_rule)
if repeat_interval_seconds is not None:
pulumi.set(__self__, "repeat_interval_seconds", repeat_interval_seconds)
if severity is not None:
pulumi.set(__self__, "severity", severity)
if system_service_rule is not None:
pulumi.set(__self__, "system_service_rule", system_service_rule)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
The cluster alert rule annotations (map)
"""
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> Optional[pulumi.Input[str]]:
"""
The cluster ID where the cluster alert rule is created (string)
"""
return pulumi.get(self, "cluster_id")
@cluster_id.setter
def cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_id", value)
@property
@pulumi.getter(name="eventRule")
def event_rule(self) -> Optional[pulumi.Input['ClusterAlertRuleEventRuleArgs']]:
"""
The cluster alert rule event rule. ConflictsWith: `"metric_rule", "node_rule", "system_service_rule"` (list Maxitems:1)
"""
return pulumi.get(self, "event_rule")
@event_rule.setter
def event_rule(self, value: Optional[pulumi.Input['ClusterAlertRuleEventRuleArgs']]):
pulumi.set(self, "event_rule", value)
@property
@pulumi.getter(name="groupId")
def group_id(self) -> Optional[pulumi.Input[str]]:
"""
The cluster alert rule alert group ID (string)
"""
return pulumi.get(self, "group_id")
@group_id.setter
def group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "group_id", value)
@property
@pulumi.getter(name="groupIntervalSeconds")
def group_interval_seconds(self) -> Optional[pulumi.Input[int]]:
"""
The cluster alert rule group interval seconds. Default: `180` (int)
"""
return pulumi.get(self, "group_interval_seconds")
@group_interval_seconds.setter
def group_interval_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "group_interval_seconds", value)
@property
@pulumi.getter(name="groupWaitSeconds")
def group_wait_seconds(self) -> Optional[pulumi.Input[int]]:
"""
The cluster alert rule group wait seconds. Default: `180` (int)
"""
return pulumi.get(self, "group_wait_seconds")
@group_wait_seconds.setter
def group_wait_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "group_wait_seconds", value)
@property
@pulumi.getter
def inherited(self) -> Optional[pulumi.Input[bool]]:
"""
The cluster alert rule inherited. Default: `true` (bool)
"""
return pulumi.get(self, "inherited")
@inherited.setter
def inherited(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "inherited", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
The cluster alert rule labels (map)
"""
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter(name="metricRule")
def metric_rule(self) -> Optional[pulumi.Input['ClusterAlertRuleMetricRuleArgs']]:
"""
The cluster alert rule metric rule. ConflictsWith: `"event_rule", "node_rule", "system_service_rule"` (list Maxitems:1)
"""
return pulumi.get(self, "metric_rule")
@metric_rule.setter
def metric_rule(self, value: Optional[pulumi.Input['ClusterAlertRuleMetricRuleArgs']]):
pulumi.set(self, "metric_rule", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The cluster alert rule name (string)
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="nodeRule")
def node_rule(self) -> Optional[pulumi.Input['ClusterAlertRuleNodeRuleArgs']]:
"""
The cluster alert rule node rule. ConflictsWith: `"event_rule", "metric_rule", "system_service_rule"` (list Maxitems:1)
"""
return pulumi.get(self, "node_rule")
@node_rule.setter
def node_rule(self, value: Optional[pulumi.Input['ClusterAlertRuleNodeRuleArgs']]):
pulumi.set(self, "node_rule", value)
@property
@pulumi.getter(name="repeatIntervalSeconds")
def repeat_interval_seconds(self) -> Optional[pulumi.Input[int]]:
"""
The cluster alert rule repeat interval seconds. Default: `3600` (int)
"""
return pulumi.get(self, "repeat_interval_seconds")
@repeat_interval_seconds.setter
def repeat_interval_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "repeat_interval_seconds", value)
@property
@pulumi.getter
def severity(self) -> Optional[pulumi.Input[str]]:
"""
The cluster alert rule severity. Supported values: `"critical" | "info" | "warning"`. Default: `critical` (string)
"""
return pulumi.get(self, "severity")
@severity.setter
def severity(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "severity", value)
@property
@pulumi.getter(name="systemServiceRule")
def system_service_rule(self) -> Optional[pulumi.Input['ClusterAlertRuleSystemServiceRuleArgs']]:
"""
The cluster alert rule system service rule. ConflictsWith: `"event_rule", "metric_rule", "node_rule"` (list Maxitems:1)
"""
return pulumi.get(self, "system_service_rule")
@system_service_rule.setter
def system_service_rule(self, value: Optional[pulumi.Input['ClusterAlertRuleSystemServiceRuleArgs']]):
pulumi.set(self, "system_service_rule", value)
class ClusterAlertRule(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
event_rule: Optional[pulumi.Input[pulumi.InputType['ClusterAlertRuleEventRuleArgs']]] = None,
group_id: Optional[pulumi.Input[str]] = None,
group_interval_seconds: Optional[pulumi.Input[int]] = None,
group_wait_seconds: Optional[pulumi.Input[int]] = None,
inherited: Optional[pulumi.Input[bool]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
metric_rule: Optional[pulumi.Input[pulumi.InputType['ClusterAlertRuleMetricRuleArgs']]] = None,
name: Optional[pulumi.Input[str]] = None,
node_rule: Optional[pulumi.Input[pulumi.InputType['ClusterAlertRuleNodeRuleArgs']]] = None,
repeat_interval_seconds: Optional[pulumi.Input[int]] = None,
severity: Optional[pulumi.Input[str]] = None,
system_service_rule: Optional[pulumi.Input[pulumi.InputType['ClusterAlertRuleSystemServiceRuleArgs']]] = None,
__props__=None):
"""
Provides a Rancher v2 Cluster Alert Rule resource. This can be used to create Cluster Alert Rules for Rancher v2 environments and retrieve their information.
## Example Usage
```python
import pulumi
import pulumi_rancher2 as rancher2
# Create a new Rancher2 Cluster Alert Group
foo_cluster_alert_group = rancher2.ClusterAlertGroup("fooClusterAlertGroup",
cluster_id="<cluster_id>",
description="Terraform cluster alert group",
group_interval_seconds=300,
repeat_interval_seconds=3600)
# Create a new Rancher2 Cluster Alert Rule
foo_cluster_alert_rule = rancher2.ClusterAlertRule("fooClusterAlertRule",
cluster_id=foo_cluster_alert_group.cluster_id,
group_id=foo_cluster_alert_group.id,
group_interval_seconds=600,
repeat_interval_seconds=6000)
```
## Import
A Cluster Alert Rule can be imported using the Rancher cluster alert rule ID.
```sh
$ pulumi import rancher2:index/clusterAlertRule:ClusterAlertRule foo <CLUSTER_ALERT_RULE_ID>
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Mapping[str, Any]] annotations: The cluster alert rule annotations (map)
:param pulumi.Input[str] cluster_id: The cluster ID where the cluster alert rule is created (string)
:param pulumi.Input[pulumi.InputType['ClusterAlertRuleEventRuleArgs']] event_rule: The cluster alert rule event rule. ConflictsWith: `"metric_rule", "node_rule", "system_service_rule"` (list Maxitems:1)
:param pulumi.Input[str] group_id: The cluster alert rule alert group ID (string)
:param pulumi.Input[int] group_interval_seconds: The cluster alert rule group interval seconds. Default: `180` (int)
:param pulumi.Input[int] group_wait_seconds: The cluster alert rule group wait seconds. Default: `180` (int)
:param pulumi.Input[bool] inherited: The cluster alert rule inherited. Default: `true` (bool)
:param pulumi.Input[Mapping[str, Any]] labels: The cluster alert rule labels (map)
:param pulumi.Input[pulumi.InputType['ClusterAlertRuleMetricRuleArgs']] metric_rule: The cluster alert rule metric rule. ConflictsWith: `"event_rule", "node_rule", "system_service_rule"` (list Maxitems:1)
:param pulumi.Input[str] name: The cluster alert rule name (string)
:param pulumi.Input[pulumi.InputType['ClusterAlertRuleNodeRuleArgs']] node_rule: The cluster alert rule node rule. ConflictsWith: `"event_rule", "metric_rule", "system_service_rule"` (list Maxitems:1)
:param pulumi.Input[int] repeat_interval_seconds: The cluster alert rule repeat interval seconds. Default: `3600` (int)
:param pulumi.Input[str] severity: The cluster alert rule severity. Supported values: `"critical" | "info" | "warning"`. Default: `critical` (string)
:param pulumi.Input[pulumi.InputType['ClusterAlertRuleSystemServiceRuleArgs']] system_service_rule: The cluster alert rule system service rule. ConflictsWith: `"event_rule", "metric_rule", "node_rule"` (list Maxitems:1)
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ClusterAlertRuleArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a Rancher v2 Cluster Alert Rule resource. This can be used to create Cluster Alert Rules for Rancher v2 environments and retrieve their information.
## Example Usage
```python
import pulumi
import pulumi_rancher2 as rancher2
# Create a new Rancher2 Cluster Alert Group
foo_cluster_alert_group = rancher2.ClusterAlertGroup("fooClusterAlertGroup",
cluster_id="<cluster_id>",
description="Terraform cluster alert group",
group_interval_seconds=300,
repeat_interval_seconds=3600)
# Create a new Rancher2 Cluster Alert Rule
foo_cluster_alert_rule = rancher2.ClusterAlertRule("fooClusterAlertRule",
cluster_id=foo_cluster_alert_group.cluster_id,
group_id=foo_cluster_alert_group.id,
group_interval_seconds=600,
repeat_interval_seconds=6000)
```
## Import
A Cluster Alert Rule can be imported using the Rancher cluster alert rule ID.
```sh
$ pulumi import rancher2:index/clusterAlertRule:ClusterAlertRule foo <CLUSTER_ALERT_RULE_ID>
```
:param str resource_name: The name of the resource.
:param ClusterAlertRuleArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ClusterAlertRuleArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
event_rule: Optional[pulumi.Input[pulumi.InputType['ClusterAlertRuleEventRuleArgs']]] = None,
group_id: Optional[pulumi.Input[str]] = None,
group_interval_seconds: Optional[pulumi.Input[int]] = None,
group_wait_seconds: Optional[pulumi.Input[int]] = None,
inherited: Optional[pulumi.Input[bool]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
metric_rule: Optional[pulumi.Input[pulumi.InputType['ClusterAlertRuleMetricRuleArgs']]] = None,
name: Optional[pulumi.Input[str]] = None,
node_rule: Optional[pulumi.Input[pulumi.InputType['ClusterAlertRuleNodeRuleArgs']]] = None,
repeat_interval_seconds: Optional[pulumi.Input[int]] = None,
severity: Optional[pulumi.Input[str]] = None,
system_service_rule: Optional[pulumi.Input[pulumi.InputType['ClusterAlertRuleSystemServiceRuleArgs']]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ClusterAlertRuleArgs.__new__(ClusterAlertRuleArgs)
__props__.__dict__["annotations"] = annotations
if cluster_id is None and not opts.urn:
raise TypeError("Missing required property 'cluster_id'")
__props__.__dict__["cluster_id"] = cluster_id
__props__.__dict__["event_rule"] = event_rule
if group_id is None and not opts.urn:
raise TypeError("Missing required property 'group_id'")
__props__.__dict__["group_id"] = group_id
__props__.__dict__["group_interval_seconds"] = group_interval_seconds
__props__.__dict__["group_wait_seconds"] = group_wait_seconds
__props__.__dict__["inherited"] = inherited
__props__.__dict__["labels"] = labels
__props__.__dict__["metric_rule"] = metric_rule
__props__.__dict__["name"] = name
__props__.__dict__["node_rule"] = node_rule
__props__.__dict__["repeat_interval_seconds"] = repeat_interval_seconds
__props__.__dict__["severity"] = severity
__props__.__dict__["system_service_rule"] = system_service_rule
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="rancher2:index/clusterAlterRule:ClusterAlterRule")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(ClusterAlertRule, __self__).__init__(
'rancher2:index/clusterAlertRule:ClusterAlertRule',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
event_rule: Optional[pulumi.Input[pulumi.InputType['ClusterAlertRuleEventRuleArgs']]] = None,
group_id: Optional[pulumi.Input[str]] = None,
group_interval_seconds: Optional[pulumi.Input[int]] = None,
group_wait_seconds: Optional[pulumi.Input[int]] = None,
inherited: Optional[pulumi.Input[bool]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
metric_rule: Optional[pulumi.Input[pulumi.InputType['ClusterAlertRuleMetricRuleArgs']]] = None,
name: Optional[pulumi.Input[str]] = None,
node_rule: Optional[pulumi.Input[pulumi.InputType['ClusterAlertRuleNodeRuleArgs']]] = None,
repeat_interval_seconds: Optional[pulumi.Input[int]] = None,
severity: Optional[pulumi.Input[str]] = None,
system_service_rule: Optional[pulumi.Input[pulumi.InputType['ClusterAlertRuleSystemServiceRuleArgs']]] = None) -> 'ClusterAlertRule':
"""
Get an existing ClusterAlertRule resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Mapping[str, Any]] annotations: The cluster alert rule annotations (map)
:param pulumi.Input[str] cluster_id: The cluster ID where the cluster alert rule is created (string)
:param pulumi.Input[pulumi.InputType['ClusterAlertRuleEventRuleArgs']] event_rule: The cluster alert rule event rule. ConflictsWith: `"metric_rule", "node_rule", "system_service_rule"` (list Maxitems:1)
:param pulumi.Input[str] group_id: The cluster alert rule alert group ID (string)
:param pulumi.Input[int] group_interval_seconds: The cluster alert rule group interval seconds. Default: `180` (int)
:param pulumi.Input[int] group_wait_seconds: The cluster alert rule group wait seconds. Default: `180` (int)
:param pulumi.Input[bool] inherited: The cluster alert rule inherited. Default: `true` (bool)
:param pulumi.Input[Mapping[str, Any]] labels: The cluster alert rule labels (map)
:param pulumi.Input[pulumi.InputType['ClusterAlertRuleMetricRuleArgs']] metric_rule: The cluster alert rule metric rule. ConflictsWith: `"event_rule", "node_rule", "system_service_rule"` (list Maxitems:1)
:param pulumi.Input[str] name: The cluster alert rule name (string)
:param pulumi.Input[pulumi.InputType['ClusterAlertRuleNodeRuleArgs']] node_rule: The cluster alert rule node rule. ConflictsWith: `"event_rule", "metric_rule", "system_service_rule"` (list Maxitems:1)
:param pulumi.Input[int] repeat_interval_seconds: The cluster alert rule repeat interval seconds. Default: `3600` (int)
:param pulumi.Input[str] severity: The cluster alert rule severity. Supported values: `"critical" | "info" | "warning"`. Default: `critical` (string)
:param pulumi.Input[pulumi.InputType['ClusterAlertRuleSystemServiceRuleArgs']] system_service_rule: The cluster alert rule system service rule. ConflictsWith: `"event_rule", "metric_rule", "node_rule"` (list Maxitems:1)
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _ClusterAlertRuleState.__new__(_ClusterAlertRuleState)
__props__.__dict__["annotations"] = annotations
__props__.__dict__["cluster_id"] = cluster_id
__props__.__dict__["event_rule"] = event_rule
__props__.__dict__["group_id"] = group_id
__props__.__dict__["group_interval_seconds"] = group_interval_seconds
__props__.__dict__["group_wait_seconds"] = group_wait_seconds
__props__.__dict__["inherited"] = inherited
__props__.__dict__["labels"] = labels
__props__.__dict__["metric_rule"] = metric_rule
__props__.__dict__["name"] = name
__props__.__dict__["node_rule"] = node_rule
__props__.__dict__["repeat_interval_seconds"] = repeat_interval_seconds
__props__.__dict__["severity"] = severity
__props__.__dict__["system_service_rule"] = system_service_rule
return ClusterAlertRule(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def annotations(self) -> pulumi.Output[Mapping[str, Any]]:
"""
The cluster alert rule annotations (map)
"""
return pulumi.get(self, "annotations")
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> pulumi.Output[str]:
"""
The cluster ID where the cluster alert rule is created (string)
"""
return pulumi.get(self, "cluster_id")
@property
@pulumi.getter(name="eventRule")
def event_rule(self) -> pulumi.Output[Optional['outputs.ClusterAlertRuleEventRule']]:
"""
The cluster alert rule event rule. ConflictsWith: `"metric_rule", "node_rule", "system_service_rule"` (list Maxitems:1)
"""
return pulumi.get(self, "event_rule")
@property
@pulumi.getter(name="groupId")
def group_id(self) -> pulumi.Output[str]:
"""
The cluster alert rule alert group ID (string)
"""
return pulumi.get(self, "group_id")
@property
@pulumi.getter(name="groupIntervalSeconds")
def group_interval_seconds(self) -> pulumi.Output[Optional[int]]:
"""
The cluster alert rule group interval seconds. Default: `180` (int)
"""
return pulumi.get(self, "group_interval_seconds")
@property
@pulumi.getter(name="groupWaitSeconds")
def group_wait_seconds(self) -> pulumi.Output[Optional[int]]:
"""
The cluster alert rule group wait seconds. Default: `180` (int)
"""
return pulumi.get(self, "group_wait_seconds")
@property
@pulumi.getter
def inherited(self) -> pulumi.Output[Optional[bool]]:
"""
The cluster alert rule inherited. Default: `true` (bool)
"""
return pulumi.get(self, "inherited")
@property
@pulumi.getter
def labels(self) -> pulumi.Output[Mapping[str, Any]]:
"""
The cluster alert rule labels (map)
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="metricRule")
def metric_rule(self) -> pulumi.Output[Optional['outputs.ClusterAlertRuleMetricRule']]:
"""
The cluster alert rule metric rule. ConflictsWith: `"event_rule", "node_rule", "system_service_rule"` (list Maxitems:1)
"""
return pulumi.get(self, "metric_rule")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The cluster alert rule name (string)
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="nodeRule")
def node_rule(self) -> pulumi.Output[Optional['outputs.ClusterAlertRuleNodeRule']]:
"""
The cluster alert rule node rule. ConflictsWith: `"event_rule", "metric_rule", "system_service_rule"` (list Maxitems:1)
"""
return pulumi.get(self, "node_rule")
@property
@pulumi.getter(name="repeatIntervalSeconds")
def repeat_interval_seconds(self) -> pulumi.Output[Optional[int]]:
"""
The cluster alert rule repeat interval seconds. Default: `3600` (int)
"""
return pulumi.get(self, "repeat_interval_seconds")
@property
@pulumi.getter
def severity(self) -> pulumi.Output[Optional[str]]:
"""
The cluster alert rule severity. Supported values: `"critical" | "info" | "warning"`. Default: `critical` (string)
"""
return pulumi.get(self, "severity")
@property
@pulumi.getter(name="systemServiceRule")
def system_service_rule(self) -> pulumi.Output[Optional['outputs.ClusterAlertRuleSystemServiceRule']]:
"""
The cluster alert rule system service rule. ConflictsWith: `"event_rule", "metric_rule", "node_rule"` (list Maxitems:1)
"""
return pulumi.get(self, "system_service_rule")
| 48.400731 | 227 | 0.663865 | 4,458 | 39,737 | 5.681247 | 0.046433 | 0.080783 | 0.090023 | 0.068267 | 0.921507 | 0.911912 | 0.899791 | 0.893276 | 0.885814 | 0.876298 | 0 | 0.004767 | 0.223998 | 39,737 | 820 | 228 | 48.459756 | 0.816578 | 0.33533 | 0 | 0.842217 | 1 | 0 | 0.143038 | 0.074422 | 0 | 0 | 0 | 0 | 0 | 1 | 0.164179 | false | 0.002132 | 0.014925 | 0 | 0.277186 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
6283195241fa10a4e8035627e1b28c05d96fd5a5 | 9,831 | py | Python | graphtheory/traversing/dfs.py | mashal02/graphs-dict | 39917d8a7f3bdcd5d95f3549ca054d16ba535e90 | [
"BSD-3-Clause"
] | 36 | 2015-09-20T20:55:39.000Z | 2021-09-20T05:49:03.000Z | graphtheory/traversing/dfs.py | mashal02/graphs-dict | 39917d8a7f3bdcd5d95f3549ca054d16ba535e90 | [
"BSD-3-Clause"
] | 6 | 2016-03-25T21:41:46.000Z | 2020-02-12T03:18:59.000Z | graphtheory/traversing/dfs.py | mashal02/graphs-dict | 39917d8a7f3bdcd5d95f3549ca054d16ba535e90 | [
"BSD-3-Clause"
] | 9 | 2016-09-12T07:57:27.000Z | 2022-03-21T16:15:39.000Z | #!/usr/bin/python
import sys
try:
from Queue import LifoQueue
except ImportError: # Python 3
from queue import LifoQueue
class DFSWithStack:
"""Depth-First Search with a stack.
Attributes
----------
graph : input graph
color : dict with nodes, private ('WHITE', 'GREY', 'BLACK')
time : number, private
dd : dict with nodes ('GREY' time)
ff : dict with nodes ('BLACK' time)
parent : dict (DFS tree)
dag : graph (DFS tree)
Examples
--------
>>> from graphtheory.structures.edges import Edge
>>> from graphtheory.structures.graphs import Graph
>>> from graphtheory.traversing.dfs import DFSWithStack
>>> G = Graph(n=10, directed=False) # an exemplary undirected graph
# Add nodes and edges here.
>>> order = list()
>>> algorithm = DFSWithStack(G)
>>> algorithm.run(source=0, pre_action=lambda node: order.append(node))
>>> order # visited nodes
>>> algorithm.parent # DFS tree as a dict
>>> algorithm.dag # DFS tree as a directed graph
>>> algorithm.dd
>>> algorithm.ff
Notes
-----
Based on:
Cormen, T. H., Leiserson, C. E., Rivest, R. L., and Stein, C., 2009,
Introduction to Algorithms, third edition, The MIT Press,
Cambridge, London.
https://en.wikipedia.org/wiki/Depth-first_search
"""
def __init__(self, graph):
"""The algorithm initialization."""
self.graph = graph
self.color = dict(((node, "WHITE") for node in self.graph.iternodes()))
self.parent = dict(((node, None) for node in self.graph.iternodes()))
self.time = 0 # time stamp
self.dd = dict()
self.ff = dict()
self.dag = self.graph.__class__(self.graph.v(), directed=True)
for node in self.graph.iternodes(): # isolated nodes are possible
self.dag.add_node(node)
def run(self, source=None, pre_action=None, post_action=None):
"""Executable pseudocode."""
if source is not None:
self._visit(source, pre_action, post_action)
else:
for node in self.graph.iternodes():
if self.color[node] == "WHITE":
self._visit(node, pre_action, post_action)
def _visit(self, node, pre_action=None, post_action=None):
"""Explore the connected component,"""
self.time = self.time + 1
self.dd[node] = self.time
self.color[node] = "GREY"
Q = LifoQueue()
Q.put(node) # node is GREY
if pre_action: # when Q.put
pre_action(node)
while not Q.empty():
source = Q.get() # GREY node is processed
for edge in self.graph.iteroutedges(source):
if self.color[edge.target] == "WHITE":
self.parent[edge.target] = source
self.dag.add_edge(edge)
self.time = self.time + 1
self.dd[edge.target] = self.time
self.color[edge.target] = "GREY"
Q.put(edge.target) # target is GREY
if pre_action: # when Q.put
pre_action(edge.target)
self.time = self.time + 1
self.ff[source] = self.time
self.color[source] = "BLACK"
if post_action: # source became BLACK
post_action(source)
def path(self, source, target):
"""Construct a path from source to target."""
if source == target:
return [source]
elif self.parent[target] is None:
raise ValueError("no path to target")
else:
return self.path(source, self.parent[target]) + [target]
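# A small illustrative run (hedged: assumes the Graph/Edge API used in the
# docstring above; not part of the original module). For the path graph
# 0-1-2, every node is discovered before it is finished, and the DFS tree
# yields the obvious path:
#
#   G = Graph(n=3, directed=False)
#   G.add_edge(Edge(0, 1))
#   G.add_edge(Edge(1, 2))
#   algorithm = DFSWithStack(G)
#   algorithm.run(source=0)
#   assert all(algorithm.dd[v] < algorithm.ff[v] for v in algorithm.dd)
#   assert algorithm.path(0, 2) == [0, 1, 2]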
class DFSWithRecursion:
"""Depth-First Search with a recursion.
Attributes
----------
graph : input graph
color : dict with nodes, private ('WHITE', 'GREY', 'BLACK')
time : number, private
dd : dict with nodes ('GREY' time)
ff : dict with nodes ('BLACK' time)
parent : dict (DFS tree)
dag : graph (DFS tree)
Examples
--------
>>> from graphtheory.structures.edges import Edge
>>> from graphtheory.structures.graphs import Graph
>>> from graphtheory.traversing.dfs import DFSWithRecursion
>>> G = Graph(n=10, directed=False) # an exemplary undirected graph
# Add nodes and edges here.
>>> order = list()
>>> algorithm = DFSWithRecursion(G)
>>> algorithm.run(source=0, pre_action=lambda node: order.append(node))
>>> order # visited nodes
>>> algorithm.parent # DFS tree as a dict
>>> algorithm.dag # DFS tree as a directed graph
>>> algorithm.dd
>>> algorithm.ff
Notes
-----
Based on:
Cormen, T. H., Leiserson, C. E., Rivest, R. L., and Stein, C., 2009,
Introduction to Algorithms, third edition, The MIT Press,
Cambridge, London.
https://en.wikipedia.org/wiki/Depth-first_search
"""
def __init__(self, graph):
"""The algorithm initialization."""
self.graph = graph
self.color = dict(((node, "WHITE") for node in self.graph.iternodes()))
self.parent = dict(((node, None) for node in self.graph.iternodes()))
self.time = 0 # time stamp
self.dd = dict()
self.ff = dict()
self.dag = self.graph.__class__(self.graph.v(), directed=True)
for node in self.graph.iternodes(): # isolated nodes are possible
self.dag.add_node(node)
recursionlimit = sys.getrecursionlimit()
sys.setrecursionlimit(max(self.graph.v() * 2, recursionlimit))
def run(self, source=None, pre_action=None, post_action=None):
"""Executable pseudocode."""
if source is not None:
self._visit(source, pre_action, post_action)
else:
for node in self.graph.iternodes():
if self.color[node] == "WHITE":
self._visit(node, pre_action, post_action)
def _visit(self, node, pre_action=None, post_action=None):
"""Explore recursively the connected component."""
self.time = self.time + 1
self.dd[node] = self.time
self.color[node] = "GREY"
if pre_action: # _visit started
pre_action(node)
for edge in self.graph.iteroutedges(node):
if self.color[edge.target] == "WHITE":
self.parent[edge.target] = node
self.dag.add_edge(edge)
self._visit(edge.target, pre_action, post_action)
self.time = self.time + 1
self.ff[node] = self.time
self.color[node] = "BLACK"
if post_action: # node became BLACK
post_action(node)
def path(self, source, target):
"""Construct a path from source to target."""
if source == target:
return [source]
elif self.parent[target] is None:
raise ValueError("no path to target")
else:
return self.path(source, self.parent[target]) + [target]
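# Hedged note: __init__ above raises the interpreter recursion limit to at
# least 2*|V|, so even the recursion-depth worst case (a path graph) should
# traverse without RecursionError, e.g. (assuming the Graph/Edge API from
# the docstrings):
#
#   G = Graph(n=1000, directed=False)
#   for i in range(999):
#       G.add_edge(Edge(i, i + 1))
#   DFSWithRecursion(G).run(source=0)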
class SimpleDFS:
"""Depth-First Search with a recursion.
Attributes
----------
graph : input graph
parent : dict (DFS tree)
dag : graph (DFS tree)
Examples
--------
>>> from graphtheory.structures.edges import Edge
>>> from graphtheory.structures.graphs import Graph
>>> from graphtheory.traversing.dfs import SimpleDFS
>>> G = Graph(n=10, directed=False) # an exemplary undirected graph
# Add nodes and edges here.
>>> order = list()
>>> algorithm = SimpleDFS(G)
>>> algorithm.run(source=0, pre_action=lambda node: order.append(node))
>>> order # visited nodes
>>> algorithm.parent # DFS tree as a dict
>>> algorithm.dag # DFS tree as a directed graph
Notes
-----
Based on:
Cormen, T. H., Leiserson, C. E., Rivest, R. L., and Stein, C., 2009,
Introduction to Algorithms, third edition, The MIT Press,
Cambridge, London.
https://en.wikipedia.org/wiki/Depth-first_search
"""
def __init__(self, graph):
"""The algorithm initialization."""
self.graph = graph
self.parent = dict()
self.dag = self.graph.__class__(self.graph.v(), directed=True)
for node in self.graph.iternodes(): # isolated nodes are possible
self.dag.add_node(node)
recursionlimit = sys.getrecursionlimit()
sys.setrecursionlimit(max(self.graph.v() * 2, recursionlimit))
def run(self, source=None, pre_action=None, post_action=None):
"""Executable pseudocode."""
if source is not None:
self.parent[source] = None # before _visit
self._visit(source, pre_action, post_action)
else:
for node in self.graph.iternodes():
if node not in self.parent:
self.parent[node] = None # before _visit
self._visit(node, pre_action, post_action)
def _visit(self, node, pre_action=None, post_action=None):
"""Explore recursively the connected component."""
if pre_action:
pre_action(node)
for edge in self.graph.iteroutedges(node):
if edge.target not in self.parent:
self.parent[edge.target] = node # before _visit
self.dag.add_edge(edge)
self._visit(edge.target, pre_action, post_action)
if post_action:
post_action(node)
def path(self, source, target):
"""Construct a path from source to target."""
if source == target:
return [source]
elif self.parent[target] is None:
raise ValueError("no path to target")
else:
return self.path(source, self.parent[target]) + [target]
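if __name__ == "__main__":
    # A hedged self-check sketch (assumes the Graph/Edge API shown in the
    # docstrings; not part of the original module).
    from graphtheory.structures.edges import Edge
    from graphtheory.structures.graphs import Graph
    G = Graph(n=4, directed=False)
    for (source, target) in [(0, 1), (1, 2), (1, 3)]:
        G.add_edge(Edge(source, target))
    order = list()
    algorithm = SimpleDFS(G)
    algorithm.run(source=0, pre_action=lambda node: order.append(node))
    print(order)                  # visit order starting from node 0
    print(algorithm.path(0, 3))   # a DFS-tree path, e.g. [0, 1, 3]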
# EOF
| 35.490975 | 79 | 0.580714 | 1,178 | 9,831 | 4.767402 | 0.129881 | 0.043269 | 0.025463 | 0.023148 | 0.903134 | 0.88515 | 0.863248 | 0.850962 | 0.850962 | 0.850962 | 0 | 0.004503 | 0.299766 | 9,831 | 276 | 80 | 35.619565 | 0.811302 | 0.375547 | 0 | 0.789474 | 0 | 0 | 0.018321 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090226 | false | 0 | 0.030075 | 0 | 0.18797 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
65597ec2827c3a08d9bb8df07310aa99b3fb2221 | 8,990 | py | Python | couch/tests/test_utils_merge.py | madron/django-couch | 21e4c3a0022bdb7cfaff017f72025afbf5220b3b | [
"MIT"
] | null | null | null | couch/tests/test_utils_merge.py | madron/django-couch | 21e4c3a0022bdb7cfaff017f72025afbf5220b3b | [
"MIT"
] | null | null | null | couch/tests/test_utils_merge.py | madron/django-couch | 21e4c3a0022bdb7cfaff017f72025afbf5220b3b | [
"MIT"
] | null | null | null | from django.test import SimpleTestCase
from ..utils import merge_schema
class UtilsTest(SimpleTestCase):
def test_merge_schema(self):
schema = dict(
couchtest=dict(
default=dict(
emptydb=dict(),
db=dict(
designs=dict(
testdesigndoc1=dict(
views=dict(
view1=dict(
map='// couchtest db doc1 view1 map',
reduce='// couchtest db doc1 view1 reduce',
),
view2=dict(
map='// couchtest db doc1 view2 map',
),
),
language='javascript',
),
testdesigndoc2=dict(
views=dict(
view1=dict(
map='// couchtest db doc2 view1 map',
),
),
),
),
index=dict(
testindexdoc=dict(
index1=dict(fields=['document_type']),
),
),
),
anotherdb=dict(
designs=dict(
testdesigndoc=dict(
language='javascript',
views=dict(
view=dict(
map='// couchtest anotherdb doc view map',
),
),
),
),
),
),
),
)
expected_schema = dict(
default=dict(
emptydb=dict(),
db=dict(
designs=dict(
couchtest_testdesigndoc1=dict(
views=dict(
view1=dict(
map='// couchtest db doc1 view1 map',
reduce='// couchtest db doc1 view1 reduce',
),
view2=dict(
map='// couchtest db doc1 view2 map',
),
),
language='javascript',
),
couchtest_testdesigndoc2=dict(
views=dict(
view1=dict(
map='// couchtest db doc2 view1 map',
),
),
),
),
index=dict(
couchtest_testindexdoc=dict(
index1=dict(fields=['document_type']),
),
),
),
anotherdb=dict(
designs=dict(
couchtest_testdesigndoc=dict(
language='javascript',
views=dict(
view=dict(
map='// couchtest anotherdb doc view map',
),
),
),
),
),
),
)
merged_schema = merge_schema(schema)
self.assertEqual(merged_schema, expected_schema)
def test_merge_schema_multiple_apps(self):
schema = dict(
couchtest1=dict(
default=dict(
db=dict(
designs=dict(testdesigndoc=dict(views=dict(view1=dict()))),
),
),
),
couchtest2=dict(
default=dict(
db=dict(
designs=dict(testdesigndoc=dict(views=dict(view1=dict()))),
),
),
),
)
expected_schema = dict(
default=dict(
db=dict(
designs=dict(
couchtest1_testdesigndoc=dict(views=dict(view1=dict())),
couchtest2_testdesigndoc=dict(views=dict(view1=dict())),
),
),
),
)
merged_schema = merge_schema(schema)
self.assertEqual(merged_schema, expected_schema)
def test_merge_schema_multiple_servers(self):
schema = dict(
app1=dict(
server1=dict(
db1=dict(
designs=dict(
doc1=dict(views=dict(view1=dict())),
doc2=dict(views=dict(view1=dict())),
),
),
db2=dict(
designs=dict(
doc1=dict(views=dict(view1=dict())),
doc2=dict(views=dict(view1=dict())),
),
),
),
server2=dict(
db1=dict(
designs=dict(
doc1=dict(views=dict(view1=dict())),
doc2=dict(views=dict(view1=dict())),
),
),
db2=dict(
designs=dict(
doc1=dict(views=dict(view1=dict())),
doc2=dict(views=dict(view1=dict())),
),
),
),
),
app2=dict(
server1=dict(
db1=dict(
designs=dict(
doc1=dict(views=dict(view1=dict())),
doc2=dict(views=dict(view1=dict())),
),
),
db2=dict(
designs=dict(
doc1=dict(views=dict(view1=dict())),
doc2=dict(views=dict(view1=dict())),
),
),
),
server2=dict(
db1=dict(
designs=dict(
doc1=dict(views=dict(view1=dict())),
doc2=dict(views=dict(view1=dict())),
),
),
db2=dict(
designs=dict(
doc1=dict(views=dict(view1=dict())),
doc2=dict(views=dict(view1=dict())),
),
),
),
),
)
expected_schema = dict(
server1=dict(
db1=dict(
designs=dict(
app1_doc1=dict(views=dict(view1=dict())),
app1_doc2=dict(views=dict(view1=dict())),
app2_doc1=dict(views=dict(view1=dict())),
app2_doc2=dict(views=dict(view1=dict())),
),
),
db2=dict(
designs=dict(
app1_doc1=dict(views=dict(view1=dict())),
app1_doc2=dict(views=dict(view1=dict())),
app2_doc1=dict(views=dict(view1=dict())),
app2_doc2=dict(views=dict(view1=dict())),
),
),
),
server2=dict(
db1=dict(
designs=dict(
app1_doc1=dict(views=dict(view1=dict())),
app1_doc2=dict(views=dict(view1=dict())),
app2_doc1=dict(views=dict(view1=dict())),
app2_doc2=dict(views=dict(view1=dict())),
),
),
db2=dict(
designs=dict(
app1_doc1=dict(views=dict(view1=dict())),
app1_doc2=dict(views=dict(view1=dict())),
app2_doc1=dict(views=dict(view1=dict())),
app2_doc2=dict(views=dict(view1=dict())),
),
),
),
)
merged_schema = merge_schema(schema)
self.assertEqual(merged_schema, expected_schema)
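# Illustrative only: a minimal re-implementation of the behaviour the tests
# above pin down (the real merge_schema lives in ..utils). It assumes the
# app -> server -> db -> section -> document nesting used in the fixtures,
# prefixing each document name with its app name.
def _merge_schema_sketch(schema):
    merged = dict()
    for app, servers in schema.items():
        for server, dbs in servers.items():
            for db, sections in dbs.items():
                # empty databases survive the merge as empty dicts
                target_db = merged.setdefault(server, dict()).setdefault(db, dict())
                for section, documents in sections.items():
                    target_section = target_db.setdefault(section, dict())
                    for name, document in documents.items():
                        target_section['%s_%s' % (app, name)] = document
    return merged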
| 38.09322 | 83 | 0.317686 | 556 | 8,990 | 5.046763 | 0.095324 | 0.134711 | 0.185317 | 0.256593 | 0.918746 | 0.913756 | 0.909123 | 0.875624 | 0.862794 | 0.832145 | 0 | 0.036936 | 0.590434 | 8,990 | 235 | 84 | 38.255319 | 0.725149 | 0 | 0 | 0.882609 | 0 | 0 | 0.042492 | 0 | 0 | 0 | 0 | 0 | 0.013043 | 1 | 0.013043 | false | 0 | 0.008696 | 0 | 0.026087 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
656c599ec4a3f3e4848aa1bcc133ed6c6432f371 | 65 | py | Python | demo/basic/if/1.py | phpython/phpython | 1dd7a4f36461eca1fbe04364fd05f2e08209a499 | [
"MIT"
] | 13 | 2017-09-03T17:33:14.000Z | 2022-03-16T00:38:32.000Z | demo/basic/if/1.py | phpython/phpython | 1dd7a4f36461eca1fbe04364fd05f2e08209a499 | [
"MIT"
] | 2 | 2017-10-09T11:33:11.000Z | 2019-01-08T17:45:28.000Z | demo/basic/if/1.py | phpython/phpython | 1dd7a4f36461eca1fbe04364fd05f2e08209a499 | [
"MIT"
] | 4 | 2019-01-08T15:33:33.000Z | 2020-09-28T15:17:08.000Z |
if True:
print "Hi!"
if True:
print "Hi!"
else:
print "AA" | 8.125 | 12 | 0.569231 | 11 | 65 | 3.363636 | 0.545455 | 0.324324 | 0.594595 | 0.702703 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.261538 | 65 | 8 | 13 | 8.125 | 0.770833 | 0 | 0 | 0.666667 | 0 | 0 | 0.123077 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.5 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
6590fea3f6de80c4bee426929976f337c67c7db6 | 21,719 | py | Python | scheduler_sdk/model/inspection/template_pb2.py | easyopsapis/easyops-api-python | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | [
"Apache-2.0"
] | 5 | 2019-07-31T04:11:05.000Z | 2021-01-07T03:23:20.000Z | webshell_sdk/model/inspection/template_pb2.py | easyopsapis/easyops-api-python | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | [
"Apache-2.0"
] | null | null | null | webshell_sdk/model/inspection/template_pb2.py | easyopsapis/easyops-api-python | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: template.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='template.proto',
package='inspection',
syntax='proto3',
serialized_options=_b('ZDgo.easyops.local/contracts/protorepo-models/easyops/model/inspection'),
serialized_pb=_b('\n\x0etemplate.proto\x12\ninspection\"\x8c\x07\n\x12InspectionTemplate\x12\x10\n\x08pluginId\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x0c\n\x04memo\x18\x04 \x01(\t\x12I\n\x10summaryTemplates\x18\x05 \x01(\x0b\x32/.inspection.InspectionTemplate.SummaryTemplates\x12\x41\n\x0cmetricGroups\x18\x06 \x03(\x0b\x32+.inspection.InspectionTemplate.MetricGroups\x12\x0f\n\x07\x63reator\x18\x07 \x01(\t\x12\r\n\x05\x63time\x18\x08 \x01(\t\x1a\xee\x03\n\x10SummaryTemplates\x12R\n\x0cmetricGroups\x18\x01 \x03(\x0b\x32<.inspection.InspectionTemplate.SummaryTemplates.MetricGroups\x12H\n\x07metrics\x18\x02 \x03(\x0b\x32\x37.inspection.InspectionTemplate.SummaryTemplates.Metrics\x1a\x91\x01\n\x0cMetricGroups\x12\r\n\x05index\x18\x01 \x01(\x05\x12\x0e\n\x06height\x18\x02 \x01(\x05\x12\r\n\x05width\x18\x03 \x01(\x05\x12\x13\n\x0b\x64isplayType\x18\x04 \x01(\t\x12\x12\n\ntransposed\x18\x05 \x01(\x08\x12\n\n\x02id\x18\x06 \x01(\t\x12\x0c\n\x04name\x18\x07 \x01(\t\x12\x10\n\x08\x63\x61tegory\x18\x08 \x01(\t\x1a\xa7\x01\n\x07Metrics\x12\x15\n\rmetricGroupId\x18\x01 \x01(\t\x12\r\n\x05index\x18\x02 \x01(\x05\x12\x0e\n\x06height\x18\x03 \x01(\x05\x12\r\n\x05width\x18\x04 \x01(\x05\x12\x12\n\nabscissaId\x18\x05 \x01(\t\x12\x14\n\x0c\x61\x62scissaName\x18\x06 \x01(\t\x12\x13\n\x0b\x64isplayType\x18\x07 \x01(\t\x12\n\n\x02id\x18\x08 \x01(\t\x12\x0c\n\x04name\x18\t \x01(\t\x1a\x9c\x01\n\x0cMetricGroups\x12\r\n\x05index\x18\x01 \x01(\x05\x12\x13\n\x0b\x64isplayType\x18\x02 \x01(\t\x12\x12\n\ntransposed\x18\x03 \x01(\x08\x12\x12\n\nabscissaId\x18\x04 \x01(\t\x12\x14\n\x0c\x61\x62scissaName\x18\x05 \x01(\t\x12\n\n\x02id\x18\x06 \x01(\t\x12\x0c\n\x04name\x18\x07 \x01(\t\x12\x10\n\x08\x63\x61tegory\x18\x08 \x01(\tBFZDgo.easyops.local/contracts/protorepo-models/easyops/model/inspectionb\x06proto3')
)
_INSPECTIONTEMPLATE_SUMMARYTEMPLATES_METRICGROUPS = _descriptor.Descriptor(
name='MetricGroups',
full_name='inspection.InspectionTemplate.SummaryTemplates.MetricGroups',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='index', full_name='inspection.InspectionTemplate.SummaryTemplates.MetricGroups.index', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='height', full_name='inspection.InspectionTemplate.SummaryTemplates.MetricGroups.height', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='width', full_name='inspection.InspectionTemplate.SummaryTemplates.MetricGroups.width', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='displayType', full_name='inspection.InspectionTemplate.SummaryTemplates.MetricGroups.displayType', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='transposed', full_name='inspection.InspectionTemplate.SummaryTemplates.MetricGroups.transposed', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='inspection.InspectionTemplate.SummaryTemplates.MetricGroups.id', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='inspection.InspectionTemplate.SummaryTemplates.MetricGroups.name', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='category', full_name='inspection.InspectionTemplate.SummaryTemplates.MetricGroups.category', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=465,
serialized_end=610,
)
_INSPECTIONTEMPLATE_SUMMARYTEMPLATES_METRICS = _descriptor.Descriptor(
name='Metrics',
full_name='inspection.InspectionTemplate.SummaryTemplates.Metrics',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='metricGroupId', full_name='inspection.InspectionTemplate.SummaryTemplates.Metrics.metricGroupId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='index', full_name='inspection.InspectionTemplate.SummaryTemplates.Metrics.index', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='height', full_name='inspection.InspectionTemplate.SummaryTemplates.Metrics.height', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='width', full_name='inspection.InspectionTemplate.SummaryTemplates.Metrics.width', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='abscissaId', full_name='inspection.InspectionTemplate.SummaryTemplates.Metrics.abscissaId', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='abscissaName', full_name='inspection.InspectionTemplate.SummaryTemplates.Metrics.abscissaName', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='displayType', full_name='inspection.InspectionTemplate.SummaryTemplates.Metrics.displayType', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='inspection.InspectionTemplate.SummaryTemplates.Metrics.id', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='inspection.InspectionTemplate.SummaryTemplates.Metrics.name', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=613,
serialized_end=780,
)
_INSPECTIONTEMPLATE_SUMMARYTEMPLATES = _descriptor.Descriptor(
name='SummaryTemplates',
full_name='inspection.InspectionTemplate.SummaryTemplates',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='metricGroups', full_name='inspection.InspectionTemplate.SummaryTemplates.metricGroups', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='metrics', full_name='inspection.InspectionTemplate.SummaryTemplates.metrics', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_INSPECTIONTEMPLATE_SUMMARYTEMPLATES_METRICGROUPS, _INSPECTIONTEMPLATE_SUMMARYTEMPLATES_METRICS, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=286,
serialized_end=780,
)
_INSPECTIONTEMPLATE_METRICGROUPS = _descriptor.Descriptor(
name='MetricGroups',
full_name='inspection.InspectionTemplate.MetricGroups',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='index', full_name='inspection.InspectionTemplate.MetricGroups.index', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='displayType', full_name='inspection.InspectionTemplate.MetricGroups.displayType', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='transposed', full_name='inspection.InspectionTemplate.MetricGroups.transposed', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='abscissaId', full_name='inspection.InspectionTemplate.MetricGroups.abscissaId', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='abscissaName', full_name='inspection.InspectionTemplate.MetricGroups.abscissaName', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='inspection.InspectionTemplate.MetricGroups.id', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='inspection.InspectionTemplate.MetricGroups.name', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='category', full_name='inspection.InspectionTemplate.MetricGroups.category', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=783,
serialized_end=939,
)
_INSPECTIONTEMPLATE = _descriptor.Descriptor(
name='InspectionTemplate',
full_name='inspection.InspectionTemplate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pluginId', full_name='inspection.InspectionTemplate.pluginId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='inspection.InspectionTemplate.id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='inspection.InspectionTemplate.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='memo', full_name='inspection.InspectionTemplate.memo', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='summaryTemplates', full_name='inspection.InspectionTemplate.summaryTemplates', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='metricGroups', full_name='inspection.InspectionTemplate.metricGroups', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='creator', full_name='inspection.InspectionTemplate.creator', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ctime', full_name='inspection.InspectionTemplate.ctime', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_INSPECTIONTEMPLATE_SUMMARYTEMPLATES, _INSPECTIONTEMPLATE_METRICGROUPS, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=31,
serialized_end=939,
)
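# Resolve cross-references between the descriptors defined above: attach the
# nested types to their containing messages and bind each message-typed field
# to its descriptor.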
_INSPECTIONTEMPLATE_SUMMARYTEMPLATES_METRICGROUPS.containing_type = _INSPECTIONTEMPLATE_SUMMARYTEMPLATES
_INSPECTIONTEMPLATE_SUMMARYTEMPLATES_METRICS.containing_type = _INSPECTIONTEMPLATE_SUMMARYTEMPLATES
_INSPECTIONTEMPLATE_SUMMARYTEMPLATES.fields_by_name['metricGroups'].message_type = _INSPECTIONTEMPLATE_SUMMARYTEMPLATES_METRICGROUPS
_INSPECTIONTEMPLATE_SUMMARYTEMPLATES.fields_by_name['metrics'].message_type = _INSPECTIONTEMPLATE_SUMMARYTEMPLATES_METRICS
_INSPECTIONTEMPLATE_SUMMARYTEMPLATES.containing_type = _INSPECTIONTEMPLATE
_INSPECTIONTEMPLATE_METRICGROUPS.containing_type = _INSPECTIONTEMPLATE
_INSPECTIONTEMPLATE.fields_by_name['summaryTemplates'].message_type = _INSPECTIONTEMPLATE_SUMMARYTEMPLATES
_INSPECTIONTEMPLATE.fields_by_name['metricGroups'].message_type = _INSPECTIONTEMPLATE_METRICGROUPS
DESCRIPTOR.message_types_by_name['InspectionTemplate'] = _INSPECTIONTEMPLATE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
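# Build the concrete message classes via reflection; the nested class layout
# mirrors the proto message nesting.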
InspectionTemplate = _reflection.GeneratedProtocolMessageType('InspectionTemplate', (_message.Message,), {
'SummaryTemplates' : _reflection.GeneratedProtocolMessageType('SummaryTemplates', (_message.Message,), {
'MetricGroups' : _reflection.GeneratedProtocolMessageType('MetricGroups', (_message.Message,), {
'DESCRIPTOR' : _INSPECTIONTEMPLATE_SUMMARYTEMPLATES_METRICGROUPS,
'__module__' : 'template_pb2'
# @@protoc_insertion_point(class_scope:inspection.InspectionTemplate.SummaryTemplates.MetricGroups)
})
,
'Metrics' : _reflection.GeneratedProtocolMessageType('Metrics', (_message.Message,), {
'DESCRIPTOR' : _INSPECTIONTEMPLATE_SUMMARYTEMPLATES_METRICS,
'__module__' : 'template_pb2'
# @@protoc_insertion_point(class_scope:inspection.InspectionTemplate.SummaryTemplates.Metrics)
})
,
'DESCRIPTOR' : _INSPECTIONTEMPLATE_SUMMARYTEMPLATES,
'__module__' : 'template_pb2'
# @@protoc_insertion_point(class_scope:inspection.InspectionTemplate.SummaryTemplates)
})
,
'MetricGroups' : _reflection.GeneratedProtocolMessageType('MetricGroups', (_message.Message,), {
'DESCRIPTOR' : _INSPECTIONTEMPLATE_METRICGROUPS,
'__module__' : 'template_pb2'
# @@protoc_insertion_point(class_scope:inspection.InspectionTemplate.MetricGroups)
})
,
'DESCRIPTOR' : _INSPECTIONTEMPLATE,
'__module__' : 'template_pb2'
# @@protoc_insertion_point(class_scope:inspection.InspectionTemplate)
})
_sym_db.RegisterMessage(InspectionTemplate)
_sym_db.RegisterMessage(InspectionTemplate.SummaryTemplates)
_sym_db.RegisterMessage(InspectionTemplate.SummaryTemplates.MetricGroups)
_sym_db.RegisterMessage(InspectionTemplate.SummaryTemplates.Metrics)
_sym_db.RegisterMessage(InspectionTemplate.MetricGroups)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
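# A minimal usage sketch of the generated classes (illustrative only; the field
# values are hypothetical and not part of the generated module). Guarded so it
# never runs when this module is imported.
if __name__ == '__main__':
    _demo = InspectionTemplate(pluginId='plugin-001', id='tpl-001', name='sample')
    _group = _demo.metricGroups.add()
    _group.index = 0
    _group.name = 'cpu'
    # Round-trip through the wire format to confirm the descriptors resolve.
    _payload = _demo.SerializeToString()
    assert InspectionTemplate.FromString(_payload) == _demo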
| 49.138009 | 1,854 | 0.752567 | 2,624 | 21,719 | 5.977134 | 0.071265 | 0.056108 | 0.045907 | 0.091813 | 0.832887 | 0.780094 | 0.752678 | 0.714996 | 0.678972 | 0.659398 | 0 | 0.037494 | 0.12321 | 21,719 | 441 | 1,855 | 49.249433 | 0.786116 | 0.02758 | 0 | 0.726161 | 1 | 0.002445 | 0.231134 | 0.189872 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.012225 | 0 | 0.012225 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
65a0beee11c96dd32534bfc2246415b81f31210b | 333 | py | Python | dashboard/views/_brother/__init__.py | beta-nu-theta-chi/ox-dashboard | 842d86a381f26159b2c5bad39a95169496832023 | [
"MIT"
] | null | null | null | dashboard/views/_brother/__init__.py | beta-nu-theta-chi/ox-dashboard | 842d86a381f26159b2c5bad39a95169496832023 | [
"MIT"
] | 70 | 2016-11-16T18:49:02.000Z | 2021-04-26T00:47:18.000Z | dashboard/views/_brother/__init__.py | beta-nu-theta-chi/ox-dashboard | 842d86a381f26159b2c5bad39a95169496832023 | [
"MIT"
] | null | null | null | from dashboard.views._brother._campus_groups import *
from dashboard.views._brother._events import *
from dashboard.views._brother._excuse import *
from dashboard.views._brother._general import *
from dashboard.views._brother._media import *
from dashboard.views._brother._pnm import *
from dashboard.views._brother._service import * | 47.571429 | 53 | 0.834835 | 43 | 333 | 6.116279 | 0.302326 | 0.346008 | 0.479087 | 0.665399 | 0.707224 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.081081 | 333 | 7 | 54 | 47.571429 | 0.859477 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
65b66aeb10dab8432e02449bac1157b71a9ed114 | 292 | py | Python | pymc3_models/__init__.py | pysr1/pymc3_models | 420660235b6d38e92f7de308437de2aa1a76a0f0 | [
"Apache-2.0"
] | null | null | null | pymc3_models/__init__.py | pysr1/pymc3_models | 420660235b6d38e92f7de308437de2aa1a76a0f0 | [
"Apache-2.0"
] | null | null | null | pymc3_models/__init__.py | pysr1/pymc3_models | 420660235b6d38e92f7de308437de2aa1a76a0f0 | [
"Apache-2.0"
] | null | null | null | from pymc3_models.models.HierarchicalLogisticRegression import HierarchicalLogisticRegression
from pymc3_models.models.LinearRegression import LinearRegression
from pymc3_models.models.LogisticRegression import LogisticRegression
from pymc3_models.models.NaiveBayes import GaussianNaiveBayes
| 58.4 | 93 | 0.917808 | 28 | 292 | 9.428571 | 0.321429 | 0.136364 | 0.227273 | 0.318182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014493 | 0.054795 | 292 | 4 | 94 | 73 | 0.942029 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
65c3bb247c091f8152f50f05d07a6f2404bf278f | 65,503 | py | Python | library/tests/test_nifcloud_fw.py | hunter9x/ansible-role-nifcloud | 340457631e834e2a8783d76bd089364b8842432f | [
"Apache-2.0"
] | 9 | 2017-01-18T01:45:46.000Z | 2017-09-19T03:25:43.000Z | library/tests/test_nifcloud_fw.py | hunter9x/ansible-role-nifcloud | 340457631e834e2a8783d76bd089364b8842432f | [
"Apache-2.0"
] | 43 | 2017-01-18T07:24:20.000Z | 2017-11-01T09:16:02.000Z | library/tests/test_nifcloud_fw.py | hunter9x/ansible-role-nifcloud | 340457631e834e2a8783d76bd089364b8842432f | [
"Apache-2.0"
] | 11 | 2017-01-18T02:44:20.000Z | 2017-10-19T09:33:02.000Z | # -*- coding: utf-8 -*-
# Copyright 2017 FUJITSU CLOUD TECHNOLOGIES LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import sys
import unittest
import xml.etree.ElementTree as etree
import mock
sys.path.append('.')
sys.path.append('..')
import nifcloud_fw  # noqa: E402 -- sys.path must include the module dir before this import
class TestNifcloud(unittest.TestCase):
def setUp(self):
self.mockModule = mock.MagicMock(
params=dict(
access_key='ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789',
secret_access_key='ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789',
endpoint='west-1.cp.cloud.nifty.com',
group_name='fw001',
description='test firewall',
availability_zone='west-11',
log_limit=100000,
state='present',
purge_ip_permissions=True,
authorize_in_bulk=False,
ip_permissions=[
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
dict(
in_out='IN',
ip_protocol='ICMP',
cidr_ip='192.168.0.0/24',
),
dict(
in_out='IN',
ip_protocol='SSH',
cidr_ip='10.0.0.11',
),
dict(
in_out='IN',
ip_protocol='UDP',
from_port=20000,
to_port=29999,
group_name='admin',
),
dict(
in_out='IN',
ip_protocol='TCP',
from_port=20000,
to_port=29999,
group_name='admin',
),
],
),
fail_json=mock.MagicMock(side_effect=Exception('failed')),
exit_json=mock.MagicMock(side_effect=Exception('success')),
check_mode=False,
)
self.xmlnamespace = 'https://cp.cloud.nifty.com/api/'
self.xml = nifcloud_api_response_sample
self.result = dict(
absent=dict(
created=False,
changed_attributes=dict(),
state='absent',
),
present=dict(
created=False,
changed_attributes=dict(),
state='present',
),
)
self.security_group_info = dict(
group_name='fw001',
description=None,
log_limit=1000,
ip_permissions=[
dict(
in_out='OUT',
ip_protocol='HTTP',
cidr_ip='0.0.0.0/0',
),
dict(
in_out='OUT',
ip_protocol='TCP',
from_port=10000,
to_port=19999,
group_name='admin',
),
],
)
self.mockRequestsGetDescribeSecurityGroups = mock.MagicMock(
return_value=mock.MagicMock(
status_code=200,
text=self.xml['describeSecurityGroups']
))
self.mockRequestsGetDescribeSecurityGroupsDescriptionUnicode = mock.MagicMock( # noqa
return_value=mock.MagicMock(
status_code=200,
text=self.xml['describeSecurityGroupsDescriptionUnicode']
))
self.mockRequestsGetDescribeSecurityGroupsDescriptionNone = mock.MagicMock( # noqa
return_value=mock.MagicMock(
status_code=200,
text=self.xml['describeSecurityGroupsDescriptionNone']
))
self.mockRequestsGetDescribeSecurityGroupsProcessing = mock.MagicMock(
return_value=mock.MagicMock(
status_code=200,
text=self.xml['describeSecurityGroupsProcessing']
))
self.mockRequestsGetDescribeSecurityGroupsNotFound = mock.MagicMock(
return_value=mock.MagicMock(
status_code=200,
text=self.xml['describeSecurityGroupsNotFound']
))
self.mockRequestsPostCreateSecurityGroup = mock.MagicMock(
return_value=mock.MagicMock(
status_code=200,
text=self.xml['createSecurityGroup']
))
self.mockRequestsPostUpdateSecurityGroup = mock.MagicMock(
return_value=mock.MagicMock(
status_code=200,
text=self.xml['updateSecurityGroup']
))
self.mockRequestsPostAuthorizeSecurityGroup = mock.MagicMock(
return_value=mock.MagicMock(
status_code=200,
text=self.xml['authorizeSecurityGroup']
))
self.mockRequestsPostRevokeSecurityGroup = mock.MagicMock(
return_value=mock.MagicMock(
status_code=200,
text=self.xml['revokeSecurityGroup']
))
self.mockRequestsInternalServerError = mock.MagicMock(
return_value=mock.MagicMock(
status_code=500,
text=self.xml['internalServerError']
))
self.mockDescribeSecurityGroups = mock.MagicMock(
return_value=dict(
status=200,
xml_body=etree.fromstring(self.xml['describeSecurityGroups']),
xml_namespace=dict(nc=self.xmlnamespace)
))
self.mockNotFoundSecurityGroup = mock.MagicMock(
return_value=(
self.result['absent'],
None
))
self.mockDescribeSecurityGroup = mock.MagicMock(
return_value=(
self.result['present'],
self.security_group_info,
))
self.mockRequestsError = mock.MagicMock(return_value=None)
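        # Patch time.sleep for every test so wait_for_processing's retry loop
        # runs instantly; addCleanup guarantees the patcher is stopped after
        # each test.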
patcher = mock.patch('time.sleep')
self.addCleanup(patcher.stop)
self.mock_time_sleep = patcher.start()
# calculate signature
def test_calculate_signature(self):
secret_access_key = self.mockModule.params['secret_access_key']
method = 'GET'
endpoint = self.mockModule.params['endpoint']
path = '/api/'
params = dict(
Action='DescribeSecurityGroups',
AccessKeyId=self.mockModule.params['access_key'],
SignatureMethod='HmacSHA256',
SignatureVersion='2',
GroupName=self.mockModule.params['group_name'],
)
signature = nifcloud_fw.calculate_signature(
secret_access_key,
method,
endpoint,
path,
params
)
self.assertEqual(signature,
b'+05Mgbw/WCN+U6euoFzHIyFi8i9UUTGg1uiNHqYcu38=')
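        # For reference, a sketch of the presumed signing scheme (AWS
        # signature v2 style): sort the query parameters, URL-encode keys and
        # values, join them into "GET\n{endpoint}\n/api/\n{canonical_query}",
        # then base64-encode the HMAC-SHA256 of that string under the secret
        # access key.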
# calculate signature with string parameter including slash
def test_calculate_signature_with_slash(self):
secret_access_key = self.mockModule.params['secret_access_key']
method = 'GET'
endpoint = self.mockModule.params['endpoint']
path = '/api/'
params = dict(
Action='DescribeSecurityGroups',
AccessKeyId=self.mockModule.params['access_key'],
SignatureMethod='HmacSHA256',
SignatureVersion='2',
GroupName=self.mockModule.params['group_name'],
GroupDescription='/'
)
signature = nifcloud_fw.calculate_signature(
secret_access_key,
method,
endpoint,
path,
params
)
        # The expected constant below was produced by
        # "library/tests/files/calculate_signature_sample.sh".
        # That shell script URL-encodes the slash when signing, matching
        # the behavior of nifcloud_fw.calculate_signature().
self.assertEqual(signature,
b'SsYPHOdKWpiniT39oGNJ5EjJum2gvqlUbozNxM9CSjE=')
# method get
def test_request_to_api_get(self):
method = 'GET'
action = 'DescribeSecurityGroups'
params = dict()
params["GroupName.1"] = self.mockModule.params['group_name']
with mock.patch('requests.get',
self.mockRequestsGetDescribeSecurityGroups):
info = nifcloud_fw.request_to_api(self.mockModule, method,
action, params)
self.assertEqual(info['status'], 200)
self.assertEqual(info['xml_namespace'], dict(nc=self.xmlnamespace))
self.assertEqual(
etree.tostring(info['xml_body']),
etree.tostring(
etree.fromstring(self.xml['describeSecurityGroups'])
)
)
# method post
def test_request_to_api_post(self):
method = 'POST'
action = 'CreateSecurityGroup'
params = dict(
GroupName=self.mockModule.params['group_name'],
)
with mock.patch('requests.post',
self.mockRequestsPostCreateSecurityGroup):
info = nifcloud_fw.request_to_api(self.mockModule, method,
action, params)
self.assertEqual(info['status'], 200)
self.assertEqual(info['xml_namespace'], dict(nc=self.xmlnamespace))
self.assertEqual(
etree.tostring(info['xml_body']),
etree.tostring(etree.fromstring(self.xml['createSecurityGroup']))
)
# api error
def test_request_to_api_error(self):
method = 'GET'
action = 'DescribeSecurityGroups'
params = dict()
params["GroupName.1"] = self.mockModule.params['group_name']
with mock.patch('requests.get', self.mockRequestsInternalServerError):
info = nifcloud_fw.request_to_api(self.mockModule, method,
action, params)
self.assertEqual(info['status'], 500)
self.assertEqual(
etree.tostring(info['xml_body']),
etree.tostring(etree.fromstring(self.xml['internalServerError']))
)
# method failed
def test_request_to_api_unknown(self):
method = 'UNKNOWN'
action = 'DescribeSecurityGroups'
params = dict()
params["GroupName.1"] = self.mockModule.params['group_name']
self.assertRaises(
Exception,
nifcloud_fw.request_to_api,
(self.mockModule, method, action, params)
)
# network error
def test_request_to_api_request_error(self):
method = 'GET'
action = 'DescribeSecurityGroups'
params = dict()
params["GroupName.1"] = self.mockModule.params['group_name']
with mock.patch('requests.get', self.mockRequestsError):
self.assertRaises(
Exception,
nifcloud_fw.request_to_api,
(self.mockModule, method, action, params)
)
# get api error code & message
def test_get_api_error(self):
method = 'GET'
action = 'DescribeSecurityGroups'
params = dict()
params["GroupName.1"] = self.mockModule.params['group_name']
with mock.patch('requests.get', self.mockRequestsInternalServerError):
info = nifcloud_fw.request_to_api(self.mockModule, method,
action, params)
error_info = nifcloud_fw.get_api_error(info['xml_body'])
self.assertEqual(error_info['code'], 'Server.InternalError')
self.assertEqual(error_info['message'],
'An error has occurred. Please try again later.')
# throw failed
def test_fail(self):
with self.assertRaises(Exception) as cm:
nifcloud_fw.fail(
self.mockModule,
self.result['absent'],
'error message',
group_name='fw001'
)
self.assertEqual(str(cm.exception), 'failed')
# contains_ip_permissions true case 1
def test_contains_ip_permissions_true_case_1(self):
ip_permissions = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
dict(
in_out='IN',
ip_protocol='UDP',
from_port=20000,
to_port=29999,
group_name='admin',
),
]
ip_permission = dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description=None,
from_port=None,
to_port=None,
group_name=None,
)
self.assertTrue(
nifcloud_fw.contains_ip_permissions(ip_permissions, ip_permission)
)
# contains_ip_permissions true case 2
def test_contains_ip_permissions_true_case_2(self):
ip_permissions = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
dict(
in_out='IN',
ip_protocol='UDP',
from_port=20000,
to_port=29999,
group_name='admin',
),
]
ip_permission = dict(
in_out='IN',
ip_protocol='UDP',
from_port=20000,
to_port=29999,
group_name='admin',
description='dummy',
cidr_ip=None,
)
self.assertTrue(
nifcloud_fw.contains_ip_permissions(ip_permissions, ip_permission)
)
# contains_ip_permissions true case 3
def test_contains_ip_permissions_true_case_3(self):
ip_permissions = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
dict(
in_out='IN',
ip_protocol='UDP',
from_port=20000,
to_port=20000,
group_name='admin',
),
]
ip_permission = dict(
in_out='IN',
ip_protocol='UDP',
from_port=20000,
to_port=None,
group_name='admin',
description='dummy',
cidr_ip=None,
)
self.assertTrue(
nifcloud_fw.contains_ip_permissions(ip_permissions, ip_permission)
)
# contains_ip_permissions true case 4
def test_contains_ip_permissions_true_case_4(self):
ip_permissions = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
dict(
in_out='IN',
ip_protocol='UDP',
from_port=20000,
group_name='admin',
),
]
ip_permission = dict(
in_out='IN',
ip_protocol='UDP',
from_port=20000,
to_port=20000,
group_name='admin',
description='dummy',
cidr_ip=None,
)
self.assertTrue(
nifcloud_fw.contains_ip_permissions(ip_permissions, ip_permission)
)
    # contains_ip_permissions false case 1
def test_contains_ip_permissions_false_case_1(self):
ip_permissions = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
dict(
in_out='IN',
ip_protocol='TCP',
from_port=20000,
to_port=29999,
group_name='admin',
),
]
ip_permission = dict(
in_out='IN',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
)
self.assertFalse(
nifcloud_fw.contains_ip_permissions(ip_permissions, ip_permission)
)
# contains_ip_permissions false case 2
def test_contains_ip_permissions_false_case_2(self):
ip_permissions = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
dict(
in_out='IN',
ip_protocol='TCP',
from_port=20000,
to_port=29999,
group_name='admin',
),
]
ip_permission = dict(
in_out='OUT',
ip_protocol='ICMP',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
)
self.assertFalse(
nifcloud_fw.contains_ip_permissions(ip_permissions, ip_permission)
)
# contains_ip_permissions false case 3
def test_contains_ip_permissions_false_case_3(self):
ip_permissions = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
dict(
in_out='IN',
ip_protocol='TCP',
from_port=20000,
to_port=29999,
group_name='admin',
),
]
ip_permission = dict(
in_out='OUT',
ip_protocol='ALL',
cidr_ip='10.0.0.0/16',
description='all outgoing protocols are allow',
)
self.assertFalse(
nifcloud_fw.contains_ip_permissions(ip_permissions, ip_permission)
)
# contains_ip_permissions false case 4
def test_contains_ip_permissions_false_case_4(self):
ip_permissions = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
dict(
in_out='IN',
ip_protocol='TCP',
from_port=20000,
to_port=29999,
group_name='admin',
),
]
ip_permission = dict(
in_out='IN',
ip_protocol='TCP',
from_port=10000,
to_port=29999,
group_name='admin',
)
self.assertFalse(
nifcloud_fw.contains_ip_permissions(ip_permissions, ip_permission)
)
# contains_ip_permissions false case 5
def test_contains_ip_permissions_false_case_5(self):
ip_permissions = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
dict(
in_out='IN',
ip_protocol='TCP',
from_port=20000,
to_port=29999,
group_name='admin',
),
]
ip_permission = dict(
in_out='IN',
ip_protocol='TCP',
from_port=20000,
to_port=30000,
group_name='admin',
)
self.assertFalse(
nifcloud_fw.contains_ip_permissions(ip_permissions, ip_permission)
)
# contains_ip_permissions false case 6
def test_contains_ip_permissions_false_case_6(self):
ip_permissions = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
dict(
in_out='IN',
ip_protocol='TCP',
from_port=20000,
to_port=29999,
group_name='admin',
),
]
ip_permission = dict(
in_out='IN',
ip_protocol='TCP',
from_port=20000,
to_port=29999,
group_name='default',
)
self.assertFalse(
nifcloud_fw.contains_ip_permissions(ip_permissions, ip_permission)
)
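    # Taken together: contains_ip_permissions ignores the description field,
    # treats a missing to_port as equal to from_port (true cases 3 and 4),
    # and requires every other field to match exactly (false cases 1-6).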
# except_ip_permissions case 1
def test_except_ip_permissions_case_1(self):
ip_permissions_a = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
dict(
in_out='IN',
ip_protocol='TCP',
from_port=20000,
to_port=29999,
group_name='admin',
),
]
ip_permissions_b = []
self.assertEqual(
nifcloud_fw.except_ip_permissions(ip_permissions_a,
ip_permissions_b),
ip_permissions_a
)
# except_ip_permissions case 2
def test_except_ip_permissions_case_2(self):
ip_permissions_a = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
dict(
in_out='IN',
ip_protocol='TCP',
from_port=20000,
to_port=29999,
group_name='admin',
),
]
ip_permissions_b = ip_permissions_a
self.assertEqual(
nifcloud_fw.except_ip_permissions(ip_permissions_a,
ip_permissions_b),
[]
)
# except_ip_permissions case 3
def test_except_ip_permissions_case_3(self):
ip_permissions_a = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
dict(
in_out='IN',
ip_protocol='TCP',
from_port=20000,
to_port=29999,
group_name='admin',
),
]
ip_permissions_b = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
]
self.assertEqual(
nifcloud_fw.except_ip_permissions(ip_permissions_a,
ip_permissions_b),
[
dict(
in_out='IN',
ip_protocol='TCP',
from_port=20000,
to_port=29999,
group_name='admin',
),
]
)
# except_ip_permissions case 4
def test_except_ip_permissions_case_4(self):
ip_permissions_a = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
]
ip_permissions_b = [
dict(
in_out='OUT',
ip_protocol='ANY',
cidr_ip='0.0.0.0/0',
description='all outgoing protocols are allow',
),
dict(
in_out='IN',
ip_protocol='TCP',
from_port=20000,
to_port=29999,
group_name='admin',
),
]
self.assertEqual(
nifcloud_fw.except_ip_permissions(ip_permissions_a,
ip_permissions_b),
[]
)
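    # except_ip_permissions(a, b) behaves as "a minus b" under the
    # contains_ip_permissions matching rules, as cases 1-4 above show.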
# describe present
def test_describe_security_group_present(self):
with mock.patch('requests.get',
self.mockRequestsGetDescribeSecurityGroups):
(result, info) = nifcloud_fw.describe_security_group(
self.mockModule,
self.result['absent']
)
self.assertEqual(result, dict(
created=False,
changed_attributes=dict(),
state='present',
))
self.assertIsInstance(info, dict)
self.assertEqual(info['group_name'], 'fw001')
self.assertIsInstance(info['description'], bytes)
self.assertEqual(info['description'], b'sample fw')
self.assertEqual(info['log_limit'], 100000)
self.assertEqual(info['ip_permissions'], [
dict(
ip_protocol='TCP',
in_out='IN',
from_port=10000,
to_port=10010,
cidr_ip=None,
group_name='fw002',
),
dict(
ip_protocol='ANY',
in_out='OUT',
from_port=None,
to_port=None,
cidr_ip='0.0.0.0/0',
group_name=None,
),
])
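        # (The expected values above are parsed from the
        # 'describeSecurityGroups' XML sample at the bottom of this file.)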
# describe present description unicode
def test_describe_security_group_description_unicode(self):
with mock.patch(
'requests.get',
self.mockRequestsGetDescribeSecurityGroupsDescriptionUnicode
):
(result, info) = nifcloud_fw.describe_security_group(
self.mockModule,
self.result['absent']
)
self.assertEqual(result, dict(
created=False,
changed_attributes=dict(),
state='present',
))
self.assertIsInstance(info, dict)
self.assertIsInstance(info['description'], bytes)
self.assertEqual(info['description'], u'サンプルFW'.encode('utf-8'))
# describe present description none
def test_describe_security_group_description_none(self):
with mock.patch(
'requests.get',
self.mockRequestsGetDescribeSecurityGroupsDescriptionNone
):
(result, info) = nifcloud_fw.describe_security_group(
self.mockModule,
self.result['absent']
)
self.assertEqual(result, dict(
created=False,
changed_attributes=dict(),
state='present',
))
self.assertIsInstance(info, dict)
self.assertIsInstance(info['description'], str)
self.assertEqual(info['description'], '')
# describe processing
def test_describe_security_group_processing(self):
with mock.patch(
'requests.get',
self.mockRequestsGetDescribeSecurityGroupsProcessing
):
(result, info) = nifcloud_fw.describe_security_group(
self.mockModule,
self.result['absent']
)
self.assertEqual(result, dict(
created=False,
changed_attributes=dict(),
state='processing',
))
self.assertIsNone(info)
# describe absent
def test_describe_security_group_absent(self):
with mock.patch(
'requests.get',
self.mockRequestsGetDescribeSecurityGroupsNotFound
):
(result, info) = nifcloud_fw.describe_security_group(
self.mockModule,
self.result['absent']
)
self.assertEqual(result, dict(
created=False,
changed_attributes=dict(),
state='absent',
))
self.assertIsNone(info)
# describe failed
def test_describe_security_group_failed(self):
with mock.patch('requests.get', self.mockRequestsInternalServerError):
(result, info) = nifcloud_fw.describe_security_group(
self.mockModule,
self.result['absent']
)
self.assertEqual(result, dict(
created=False,
changed_attributes=dict(),
state='absent',
))
self.assertIsNone(info)
# wait_for_processing success absent
def test_wait_for_processing_success_absent(self):
with mock.patch(
'nifcloud_fw.describe_security_group',
self.mockNotFoundSecurityGroup
):
(result, info) = nifcloud_fw.wait_for_processing(
self.mockModule,
self.result['absent'],
'absent'
)
self.assertEqual(result, self.result['absent'])
self.assertIsNone(info)
# wait_for_processing success present
def test_wait_for_processing_success_present(self):
with mock.patch(
'nifcloud_fw.describe_security_group',
self.mockDescribeSecurityGroup
):
(result, info) = nifcloud_fw.wait_for_processing(
self.mockModule,
self.result['absent'],
'present'
)
self.assertEqual(result, self.result['present'])
self.assertEqual(info, self.security_group_info)
# wait_for_processing unmatch absent
def test_wait_for_processing_failed_absent(self):
with mock.patch(
'nifcloud_fw.describe_security_group',
self.mockDescribeSecurityGroup
):
with self.assertRaises(Exception) as cm:
(result, info) = nifcloud_fw.wait_for_processing(
self.mockModule,
self.result['absent'],
'absent'
)
self.assertEqual(str(cm.exception), 'failed')
# wait_for_processing unmatch present
def test_wait_for_processing_failed_present(self):
with mock.patch(
'nifcloud_fw.describe_security_group',
self.mockNotFoundSecurityGroup
):
with self.assertRaises(Exception) as cm:
(result, info) = nifcloud_fw.wait_for_processing(
self.mockModule,
self.result['absent'],
'present'
)
self.assertEqual(str(cm.exception), 'failed')
# create present * do nothing
def test_create_security_group_skip(self):
(result, info) = nifcloud_fw.create_security_group(
self.mockModule,
self.result['present'],
self.security_group_info
)
self.assertEqual(result, self.result['present'])
self.assertEqual(info, self.security_group_info)
# create success
def test_create_security_group_success(self):
with mock.patch(
'requests.post',
self.mockRequestsPostCreateSecurityGroup
):
with mock.patch(
'nifcloud_fw.describe_security_group',
self.mockDescribeSecurityGroup
):
(result, info) = nifcloud_fw.create_security_group(
self.mockModule,
self.result['absent'],
None
)
self.assertEqual(result, dict(
created=True,
changed_attributes=dict(),
state='present',
))
self.assertEqual(info, self.security_group_info)
    # create (check_mode) * report creation without calling the API
def test_create_security_group_check_mode(self):
mock_module = mock.MagicMock(
params=copy.deepcopy(self.mockModule.params),
check_mode=True,
)
empty_security_group_info = None
(result, info) = nifcloud_fw.create_security_group(
mock_module,
self.result['present'],
empty_security_group_info
)
self.assertEqual(result, dict(
created=True,
changed_attributes=dict(),
state='present',
))
self.assertEqual(info, empty_security_group_info)
# create failed
def test_create_security_group_failed(self):
with mock.patch(
'requests.post',
self.mockRequestsPostCreateSecurityGroup
):
with mock.patch(
'nifcloud_fw.describe_security_group',
self.mockNotFoundSecurityGroup
):
with self.assertRaises(Exception) as cm:
nifcloud_fw.create_security_group(
self.mockModule,
self.result['absent'],
None
)
self.assertEqual(str(cm.exception), 'failed')
# create request failed
def test_create_security_group_request_failed(self):
with mock.patch('requests.post', self.mockRequestsInternalServerError):
with self.assertRaises(Exception) as cm:
nifcloud_fw.create_security_group(
self.mockModule,
self.result['absent'],
None
)
self.assertEqual(str(cm.exception), 'failed')
# update api success
def test_update_security_group_attribute_success(self):
params = dict(
GroupName=self.mockModule.params['group_name'],
GroupDescriptionUpdate=self.mockModule.params['description'],
)
with mock.patch(
'requests.post',
self.mockRequestsPostUpdateSecurityGroup
):
with mock.patch(
'nifcloud_fw.describe_security_group',
self.mockDescribeSecurityGroup
):
(result, info) = nifcloud_fw.update_security_group_attribute(
self.mockModule,
self.result['present'],
self.security_group_info,
params
)
self.assertEqual(result, self.result['present'])
self.assertEqual(info, self.security_group_info)
# update api absent * do nothing
def test_update_security_group_attribute_absent(self):
params = dict(
GroupName=self.mockModule.params['group_name'],
GroupDescriptionUpdate=self.mockModule.params['description'],
)
(result, info) = nifcloud_fw.update_security_group_attribute(
self.mockModule,
self.result['absent'],
None,
params
)
self.assertEqual(result, self.result['absent'])
self.assertIsNone(info)
# update api failed
def test_update_security_group_attribute_failed(self):
params = dict(
GroupName=self.mockModule.params['group_name'],
GroupDescriptionUpdate=self.mockModule.params['description'],
)
with mock.patch(
'requests.post',
self.mockRequestsPostUpdateSecurityGroup
):
with mock.patch(
'nifcloud_fw.describe_security_group',
self.mockNotFoundSecurityGroup
):
with self.assertRaises(Exception) as cm:
(result, info) = nifcloud_fw.update_security_group_attribute( # noqa
self.mockModule,
self.result['present'],
self.security_group_info,
params
)
self.assertEqual(str(cm.exception), 'failed')
# update api request failed
def test_update_security_group_attribute_request_failed(self):
params = dict(
GroupName=self.mockModule.params['group_name'],
GroupDescriptionUpdate=self.mockModule.params['description'],
)
with mock.patch('requests.post', self.mockRequestsInternalServerError):
with self.assertRaises(Exception) as cm:
(result, info) = nifcloud_fw.update_security_group_attribute(
self.mockModule,
self.result['present'],
self.security_group_info,
params
)
self.assertEqual(str(cm.exception), 'failed')
# update description success
def test_update_security_group_description_success(self):
changed_security_group_info = dict(
copy.deepcopy(self.security_group_info),
description=self.mockModule.params['description'],
)
mock_describe_security_group = mock.MagicMock(
return_value=(
self.result['present'],
changed_security_group_info,
))
with mock.patch(
'nifcloud_fw.update_security_group_attribute',
mock_describe_security_group
):
(result, info) = nifcloud_fw.update_security_group_description(
self.mockModule,
self.result['present'],
self.security_group_info
)
self.assertEqual(result, dict(
created=False,
changed_attributes=dict(
description=self.mockModule.params['description'],
),
state='present',
))
self.assertEqual(info, changed_security_group_info)
# update description absent * do nothing
def test_update_security_group_description_absent(self):
(result, info) = nifcloud_fw.update_security_group_description(
self.mockModule,
self.result['absent'],
None
)
self.assertEqual(result, self.result['absent'])
self.assertIsNone(info)
    # update skipped when description param is None * do nothing
def test_update_security_group_description_none(self):
security_group_info = dict(
copy.deepcopy(self.security_group_info),
description=self.mockModule.params['description'],
)
mock_module = mock.MagicMock(
params=dict(
copy.deepcopy(self.mockModule.params),
description=None,
),
check_mode=False,
)
(result, info) = nifcloud_fw.update_security_group_description(
mock_module,
self.result['present'],
security_group_info
)
self.assertEqual(result, self.result['present'])
self.assertEqual(info, security_group_info)
    # update skipped when description is unchanged * do nothing
def test_update_security_group_description_skip(self):
changed_security_group_info = dict(
copy.deepcopy(self.security_group_info),
description=self.mockModule.params['description'],
)
(result, info) = nifcloud_fw.update_security_group_description(
self.mockModule,
self.result['present'],
changed_security_group_info
)
self.assertEqual(result, self.result['present'])
self.assertEqual(info, changed_security_group_info)
    # update description (check_mode) * report change without calling the API
def test_update_security_group_description_check_mode(self):
mock_module = mock.MagicMock(
params=copy.deepcopy(self.mockModule.params),
check_mode=True,
)
(result, info) = nifcloud_fw.update_security_group_description(
mock_module,
self.result['present'],
self.security_group_info
)
self.assertEqual(result, dict(
created=False,
changed_attributes=dict(
description=self.mockModule.params['description'],
),
state='present',
))
self.assertEqual(info, self.security_group_info)
# update description failed
def test_update_security_group_description_failed(self):
with mock.patch(
'nifcloud_fw.update_security_group_attribute',
self.mockDescribeSecurityGroup
):
with self.assertRaises(Exception) as cm:
(result, info) = nifcloud_fw.update_security_group_description(
self.mockModule,
self.result['present'],
self.security_group_info
)
self.assertEqual(str(cm.exception), 'failed')
# update log_limit success
def test_update_security_group_log_limit_success(self):
changed_security_group_info = dict(
copy.deepcopy(self.security_group_info),
log_limit=self.mockModule.params['log_limit'],
)
mock_describe_security_group = mock.MagicMock(
return_value=(
self.result['present'],
changed_security_group_info,
))
with mock.patch(
'nifcloud_fw.update_security_group_attribute',
mock_describe_security_group
):
(result, info) = nifcloud_fw.update_security_group_log_limit(
self.mockModule,
self.result['present'],
self.security_group_info
)
self.assertEqual(result, dict(
created=False,
changed_attributes=dict(
log_limit=self.mockModule.params['log_limit'],
),
state='present',
))
self.assertEqual(info, changed_security_group_info)
# update log_limit absent * do nothing
def test_update_security_group_log_limit_absent(self):
(result, info) = nifcloud_fw.update_security_group_log_limit(
self.mockModule,
self.result['absent'],
None
)
self.assertEqual(result, self.result['absent'])
self.assertIsNone(info)
    # update skipped when log_limit param is None * do nothing
def test_update_security_group_log_limit_none(self):
security_group_info = dict(
copy.deepcopy(self.security_group_info),
            log_limit=self.mockModule.params['log_limit'],
)
mock_module = mock.MagicMock(
params=dict(
copy.deepcopy(self.mockModule.params),
log_limit=None,
),
check_mode=False,
)
(result, info) = nifcloud_fw.update_security_group_log_limit(
mock_module,
self.result['present'],
security_group_info
)
self.assertEqual(result, self.result['present'])
self.assertEqual(info, security_group_info)
    # update skipped when log_limit is unchanged * do nothing
def test_update_security_group_log_limit_skip(self):
changed_security_group_info = dict(
copy.deepcopy(self.security_group_info),
log_limit=self.mockModule.params['log_limit'],
)
(result, info) = nifcloud_fw.update_security_group_log_limit(
self.mockModule,
self.result['present'],
changed_security_group_info
)
self.assertEqual(result, self.result['present'])
self.assertEqual(info, changed_security_group_info)
    # update log_limit (check_mode) * report change without calling the API
def test_update_security_group_log_limit_check_mode(self):
mock_module = mock.MagicMock(
params=copy.deepcopy(self.mockModule.params),
check_mode=True,
)
(result, info) = nifcloud_fw.update_security_group_log_limit(
mock_module,
self.result['present'],
self.security_group_info
)
self.assertEqual(result, dict(
created=False,
changed_attributes=dict(
log_limit=self.mockModule.params['log_limit'],
),
state='present',
))
self.assertEqual(info, self.security_group_info)
# update log_limit failed
def test_update_security_group_log_limit_failed(self):
with mock.patch(
'nifcloud_fw.update_security_group_attribute',
self.mockDescribeSecurityGroup
):
with self.assertRaises(Exception) as cm:
(result, info) = nifcloud_fw.update_security_group_log_limit(
self.mockModule,
self.result['present'],
self.security_group_info
)
self.assertEqual(str(cm.exception), 'failed')
# update
def test_update_security_group(self):
with mock.patch(
'nifcloud_fw.update_security_group_description',
self.mockDescribeSecurityGroup
):
with mock.patch(
'nifcloud_fw.update_security_group_log_limit',
self.mockDescribeSecurityGroup
):
(result, info) = nifcloud_fw.update_security_group(
self.mockModule,
self.result['present'],
self.security_group_info
)
self.assertEqual(result, self.result['present'])
self.assertEqual(info, self.security_group_info)
# update absent * do nothing
def test_update_security_group_absent(self):
(result, info) = nifcloud_fw.update_security_group(
self.mockModule,
self.result['absent'],
None
)
self.assertEqual(result, self.result['absent'])
self.assertIsNone(info)
# authorize success
def test_authorize_security_group_success(self):
changed_security_group_info = dict(
copy.deepcopy(self.security_group_info),
ip_permissions=list(
self.security_group_info['ip_permissions'] +
self.mockModule.params['ip_permissions'],
)
)
mock_describe_security_group = mock.MagicMock(
return_value=(
self.result['present'],
changed_security_group_info,
))
with mock.patch(
'requests.post',
self.mockRequestsPostAuthorizeSecurityGroup
):
with mock.patch(
'nifcloud_fw.describe_security_group',
mock_describe_security_group
):
(result, info) = nifcloud_fw.authorize_security_group(
self.mockModule,
self.result['present'],
self.security_group_info
)
self.assertEqual(result, dict(
created=False,
changed_attributes=dict(
number_of_authorize_rules=len(
self.mockModule.params['ip_permissions']
),
),
state='present',
))
self.assertEqual(info, changed_security_group_info)
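        # authorize_security_group submits only the rules present in the
        # module params but missing from the current group, hence the rule
        # count asserted above.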
# authorize(bulk) success
def test_authorize_security_group_bulk_success(self):
changed_security_group_info = dict(
copy.deepcopy(self.security_group_info),
ip_permissions=list(
self.security_group_info['ip_permissions'] +
self.mockModule.params['ip_permissions'],
),
)
mock_module = mock.MagicMock(
params=dict(
copy.deepcopy(self.mockModule.params),
authorize_in_bulk=True,
),
check_mode=False,
)
mock_describe_security_group = mock.MagicMock(
return_value=(
self.result['present'],
changed_security_group_info,
))
with mock.patch(
'requests.post',
self.mockRequestsPostAuthorizeSecurityGroup
):
with mock.patch(
'nifcloud_fw.describe_security_group',
mock_describe_security_group
):
(result, info) = nifcloud_fw.authorize_security_group(
mock_module,
self.result['present'],
self.security_group_info
)
self.assertEqual(result, dict(
created=False,
changed_attributes=dict(
number_of_authorize_rules=len(
self.mockModule.params['ip_permissions']
),
),
state='present',
))
self.assertEqual(info, changed_security_group_info)
    # authorize skipped when ip_permissions already match * do nothing
def test_authorize_security_group_skip(self):
changed_security_group_info = dict(
copy.deepcopy(self.security_group_info),
ip_permissions=self.mockModule.params['ip_permissions'],
)
(result, info) = nifcloud_fw.authorize_security_group(
self.mockModule,
self.result['present'],
changed_security_group_info
)
self.assertEqual(result, self.result['present'])
self.assertEqual(info, changed_security_group_info)
# authorize absent * do nothing
def test_authorize_security_group_absent(self):
(result, info) = nifcloud_fw.authorize_security_group(
self.mockModule,
self.result['absent'],
None
)
self.assertEqual(result, self.result['absent'])
self.assertIsNone(info)
    # authorize (check_mode) * report rules to add without calling the API
def test_authorize_security_group_check_mode(self):
mock_module = mock.MagicMock(
params=copy.deepcopy(self.mockModule.params),
check_mode=True,
)
(result, info) = nifcloud_fw.authorize_security_group(
mock_module,
self.result['present'],
self.security_group_info
)
self.assertEqual(result, dict(
created=False,
changed_attributes=dict(
number_of_authorize_rules=len(
self.mockModule.params['ip_permissions']
),
),
state='present',
))
self.assertEqual(info, self.security_group_info)
# authorize failed
def test_authorize_security_group_failed(self):
with mock.patch(
'requests.post',
self.mockRequestsPostAuthorizeSecurityGroup
):
with mock.patch(
'nifcloud_fw.describe_security_group',
self.mockDescribeSecurityGroup
):
with self.assertRaises(Exception) as cm:
nifcloud_fw.authorize_security_group(
self.mockModule,
self.result['present'],
self.security_group_info
)
self.assertEqual(str(cm.exception), 'failed')
# authorize request failed
def test_authorize_security_group_request_failed(self):
with mock.patch('requests.post', self.mockRequestsInternalServerError):
with self.assertRaises(Exception) as cm:
(result, info) = nifcloud_fw.authorize_security_group(
self.mockModule,
self.result['present'],
self.security_group_info
)
self.assertEqual(str(cm.exception), 'failed')
# revoke success
def test_revoke_security_group_success(self):
security_group_info = dict(
copy.deepcopy(self.security_group_info),
ip_permissions=list(
self.security_group_info['ip_permissions'] +
self.mockModule.params['ip_permissions'],
),
)
changed_security_group_info = dict(
copy.deepcopy(self.security_group_info),
ip_permissions=self.mockModule.params['ip_permissions'],
)
mock_describe_security_group = mock.MagicMock(
return_value=(
self.result['present'],
changed_security_group_info,
))
with mock.patch(
'requests.post',
self.mockRequestsPostRevokeSecurityGroup
):
with mock.patch(
'nifcloud_fw.describe_security_group',
mock_describe_security_group
):
(result, info) = nifcloud_fw.revoke_security_group(
self.mockModule,
self.result['present'],
security_group_info
)
self.assertEqual(result, dict(
created=False,
changed_attributes=dict(
number_of_revoke_rules=len(
self.security_group_info['ip_permissions']
),
),
state='present',
))
self.assertEqual(info, changed_security_group_info)
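        # Conversely, revoke_security_group removes the rules present on the
        # group but absent from the module params (here, the two pre-existing
        # rules), hence number_of_revoke_rules above.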
    # revoke skipped when ip_permissions already match * do nothing
def test_revoke_security_group_skip(self):
security_group_info = dict(
copy.deepcopy(self.security_group_info),
ip_permissions=self.mockModule.params['ip_permissions'],
)
(result, info) = nifcloud_fw.revoke_security_group(
self.mockModule,
self.result['present'],
security_group_info
)
self.assertEqual(result, self.result['present'])
self.assertEqual(info, security_group_info)
    # revoke skipped when purge_ip_permissions is False * do nothing
def test_revoke_security_group_purge_skip(self):
security_group_info = dict(
copy.deepcopy(self.security_group_info),
ip_permissions=self.mockModule.params['ip_permissions'],
)
mock_ip_permissions_removed = self.mockModule
mock_ip_permissions_removed.params['ip_permissions'] = []
mock_ip_permissions_removed.params['purge_ip_permissions'] = False
(result, info) = nifcloud_fw.revoke_security_group(
mock_ip_permissions_removed,
self.result['present'],
security_group_info
)
self.assertEqual(result, self.result['present'])
self.assertEqual(info, security_group_info)
# revoke absent * do nothing
def test_revoke_security_group_absent(self):
(result, info) = nifcloud_fw.revoke_security_group(
self.mockModule,
self.result['absent'],
None
)
self.assertEqual(result, self.result['absent'])
self.assertIsNone(info)
    # revoke (check_mode) * report rules to remove without calling the API
def test_revoke_security_group_check_mode(self):
mock_module = mock.MagicMock(
params=copy.deepcopy(self.mockModule.params),
check_mode=True,
)
security_group_info = dict(
copy.deepcopy(self.security_group_info),
ip_permissions=list(
self.security_group_info['ip_permissions'] +
self.mockModule.params['ip_permissions'],
),
)
(result, info) = nifcloud_fw.revoke_security_group(
mock_module,
self.result['present'],
security_group_info
)
self.assertEqual(result, dict(
created=False,
changed_attributes=dict(
number_of_revoke_rules=len(
self.security_group_info['ip_permissions']
),
),
state='present',
))
self.assertEqual(info, security_group_info)
# revoke failed
def test_revoke_security_group_failed(self):
with mock.patch(
'requests.post',
self.mockRequestsPostRevokeSecurityGroup
):
with mock.patch(
'nifcloud_fw.describe_security_group',
self.mockDescribeSecurityGroup
):
with self.assertRaises(Exception) as cm:
nifcloud_fw.revoke_security_group(
self.mockModule,
self.result['present'],
self.security_group_info
)
self.assertEqual(str(cm.exception), 'failed')
# revoke request failed
def test_revoke_security_group_request_failed(self):
with mock.patch('requests.post', self.mockRequestsInternalServerError):
with self.assertRaises(Exception) as cm:
(result, info) = nifcloud_fw.revoke_security_group(
self.mockModule,
self.result['present'],
self.security_group_info
)
self.assertEqual(str(cm.exception), 'failed')
# run success (absent - create -> present - other action -> present)
def test_run_success_absent(self):
with mock.patch(
'nifcloud_fw.describe_security_group',
self.mockNotFoundSecurityGroup
):
with mock.patch(
'nifcloud_fw.create_security_group',
self.mockDescribeSecurityGroup
):
with mock.patch(
'nifcloud_fw.update_security_group',
self.mockDescribeSecurityGroup):
with mock.patch(
'nifcloud_fw.authorize_security_group',
self.mockDescribeSecurityGroup
):
with mock.patch(
'nifcloud_fw.revoke_security_group',
self.mockDescribeSecurityGroup
):
with self.assertRaises(Exception) as cm:
nifcloud_fw.run(self.mockModule)
self.assertEqual(str(cm.exception), 'success')
# run success (present - create skip -> present - other action -> present)
def test_run_success_present(self):
with mock.patch(
'nifcloud_fw.describe_security_group',
self.mockDescribeSecurityGroup
):
with mock.patch(
'nifcloud_fw.update_security_group',
self.mockDescribeSecurityGroup
):
with mock.patch(
'nifcloud_fw.authorize_security_group',
self.mockDescribeSecurityGroup
):
with mock.patch(
'nifcloud_fw.revoke_security_group',
self.mockDescribeSecurityGroup
):
with self.assertRaises(Exception) as cm:
nifcloud_fw.run(self.mockModule)
self.assertEqual(str(cm.exception), 'success')
# run failed (absent - create -> absent - skip other action -> absent)
def test_run_failed(self):
with mock.patch(
'nifcloud_fw.describe_security_group',
self.mockNotFoundSecurityGroup
):
with mock.patch(
'nifcloud_fw.create_security_group',
self.mockNotFoundSecurityGroup
):
with self.assertRaises(Exception) as cm:
nifcloud_fw.run(self.mockModule)
self.assertEqual(str(cm.exception), 'failed')
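    # The run() flow exercised above: describe -> create (when absent) ->
    # update -> authorize -> revoke, ending in exit_json ('success'), or
    # fail_json ('failed') when creation does not reach the present state.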
nifcloud_api_response_sample = dict(
    describeSecurityGroups='''
<DescribeSecurityGroupsResponse xmlns="https://cp.cloud.nifty.com/api/">
 <RequestID>5ec8da0a-6e23-4343-b474-ca0bb5c22a51</RequestID>
 <securityGroupInfo>
  <item>
   <ownerId></ownerId>
   <groupName>fw001</groupName>
   <groupDescription>sample fw</groupDescription>
   <groupStatus>applied</groupStatus>
   <ipPermissions>
    <item>
     <ipProtocol>TCP</ipProtocol>
     <fromPort>10000</fromPort>
     <toPort>10010</toPort>
     <inOut>IN</inOut>
     <groups>
      <item>
       <groupName>fw002</groupName>
      </item>
     </groups>
     <description>TCP (10000 - 10010)</description>
     <addDatetime>2001-02-03T04:05:06.007Z</addDatetime>
    </item>
    <item>
     <ipProtocol>ANY</ipProtocol>
     <inOut>OUT</inOut>
     <ipRanges>
      <item>
       <cidrIp>0.0.0.0/0</cidrIp>
      </item>
     </ipRanges>
     <description>ANY</description>
     <addDatetime>2001-02-03T04:05:06.007Z</addDatetime>
    </item>
   </ipPermissions>
   <instancesSet>
    <item>
     <instanceId>sv001</instanceId>
    </item>
    <item>
     <instanceId>sv002</instanceId>
    </item>
   </instancesSet>
   <instanceUniqueIdsSet>
    <item>
     <instanceUniqueId>i-0a1b2c01</instanceUniqueId>
    </item>
    <item>
     <instanceUniqueId>i-0a1b2c02</instanceUniqueId>
    </item>
   </instanceUniqueIdsSet>
   <groupRuleLimit>100</groupRuleLimit>
   <groupLogLimit>100000</groupLogLimit>
   <groupLogFilterNetBios>true</groupLogFilterNetBios>
   <groupLogFilterBroadcast>true</groupLogFilterBroadcast>
   <availabilityZone>west-12</availabilityZone>
  </item>
 </securityGroupInfo>
</DescribeSecurityGroupsResponse>
''',
    describeSecurityGroupsDescriptionUnicode=u'''
<DescribeSecurityGroupsResponse xmlns="https://cp.cloud.nifty.com/api/">
 <RequestID>5ec8da0a-6e23-4343-b474-ca0bb5c22a51</RequestID>
 <securityGroupInfo>
  <item>
   <ownerId></ownerId>
   <groupName>fw002</groupName>
   <groupDescription>サンプルFW</groupDescription>
   <groupStatus>applied</groupStatus>
   <ipPermissions />
   <instancesSet />
   <instanceUniqueIdsSet />
   <groupRuleLimit>100</groupRuleLimit>
   <groupLogLimit>1000</groupLogLimit>
   <groupLogFilterNetBios>false</groupLogFilterNetBios>
   <groupLogFilterBroadcast>false</groupLogFilterBroadcast>
   <availabilityZone>west-12</availabilityZone>
  </item>
 </securityGroupInfo>
</DescribeSecurityGroupsResponse>
''',
    describeSecurityGroupsDescriptionNone='''
<DescribeSecurityGroupsResponse xmlns="https://cp.cloud.nifty.com/api/">
 <RequestID>5ec8da0a-6e23-4343-b474-ca0bb5c22a51</RequestID>
 <securityGroupInfo>
  <item>
   <ownerId></ownerId>
   <groupName>fw002</groupName>
   <groupDescription />
   <groupStatus>applied</groupStatus>
   <ipPermissions />
   <instancesSet />
   <instanceUniqueIdsSet />
   <groupRuleLimit>100</groupRuleLimit>
   <groupLogLimit>1000</groupLogLimit>
   <groupLogFilterNetBios>false</groupLogFilterNetBios>
   <groupLogFilterBroadcast>false</groupLogFilterBroadcast>
   <availabilityZone>west-12</availabilityZone>
  </item>
 </securityGroupInfo>
</DescribeSecurityGroupsResponse>
''',
    describeSecurityGroupsProcessing='''
<DescribeSecurityGroupsResponse xmlns="https://cp.cloud.nifty.com/api/">
 <RequestID>5ec8da0a-6e23-4343-b474-ca0bb5c22a51</RequestID>
 <securityGroupInfo>
  <item>
   <ownerId></ownerId>
   <groupName>fw002</groupName>
   <groupDescription>Case No. 002</groupDescription>
   <groupStatus>processing</groupStatus>
   <ipPermissions />
   <instancesSet />
   <instanceUniqueIdsSet />
   <groupRuleLimit>100</groupRuleLimit>
   <groupLogLimit>1000</groupLogLimit>
   <groupLogFilterNetBios>false</groupLogFilterNetBios>
   <groupLogFilterBroadcast>false</groupLogFilterBroadcast>
   <availabilityZone>west-12</availabilityZone>
  </item>
 </securityGroupInfo>
</DescribeSecurityGroupsResponse>
''',
    describeSecurityGroupsNotFound='''
<DescribeSecurityGroupsResponse xmlns="https://cp.cloud.nifty.com/api/">
 <RequestID>5ec8da0a-6e23-4343-b474-ca0bb5c22a51</RequestID>
 <securityGroupInfo />
</DescribeSecurityGroupsResponse>
''',
    createSecurityGroup='''
<CreateSecurityGroupResponse xmlns="https://cp.cloud.nifty.com/api/">
 <requestId>320fc738-a1c7-4a2f-abcb-20813a4e997c</requestId>
 <return>true</return>
</CreateSecurityGroupResponse>
''',
    updateSecurityGroup='''
<UpdateSecurityGroupResponse xmlns="https://cp.cloud.nifty.com/api/">
 <requestId>320fc738-a1c7-4a2f-abcb-20813a4e997c</requestId>
 <return>true</return>
</UpdateSecurityGroupResponse>
''',
    authorizeSecurityGroup='''
<AuthorizeSecurityGroupIngressResponse xmlns="https://cp.cloud.nifty.com/api/">
 <requestId>320fc738-a1c7-4a2f-abcb-20813a4e997c</requestId>
 <return>true</return>
</AuthorizeSecurityGroupIngressResponse>
''',
    revokeSecurityGroup='''
<RevokeSecurityGroupIngressResponse xmlns="https://cp.cloud.nifty.com/api/">
 <requestId>320fc738-a1c7-4a2f-abcb-20813a4e997c</requestId>
 <return>true</return>
</RevokeSecurityGroupIngressResponse>
''',
    internalServerError='''
<Response>
 <Errors>
  <Error>
   <Code>Server.InternalError</Code>
   <Message>An error has occurred. Please try again later.</Message>
  </Error>
 </Errors>
 <RequestID>5ec8da0a-6e23-4343-b474-ca0bb5c22a51</RequestID>
</Response>
'''
)


if __name__ == '__main__':
    unittest.main()
| 33.660329 | 94 | 0.558997 | 5,892 | 65,503 | 5.964358 | 0.061779 | 0.080644 | 0.044505 | 0.031672 | 0.848728 | 0.826305 | 0.795686 | 0.758892 | 0.734676 | 0.715753 | 0 | 0.020038 | 0.349358 | 65,503 | 1,945 | 95 | 33.677635 | 0.804524 | 0.047937 | 0 | 0.76597 | 0 | 0 | 0.162298 | 0.076701 | 0 | 0 | 0 | 0 | 0.082985 | 1 | 0.044179 | false | 0 | 0.003582 | 0 | 0.048358 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f316e1de8ed5dfbcb7093f01533ffe1da8c8cc58 | 3,755 | py | Python | tests/forms_tests/field_tests/test_multiplechoicefield.py | jpmallarino/django | 659d2421c7adbbcd205604002d521d82d6b0b465 | [
"BSD-3-Clause",
"0BSD"
] | 16 | 2019-08-10T12:24:06.000Z | 2020-05-21T09:11:14.000Z | tests/forms_tests/field_tests/test_multiplechoicefield.py | jpmallarino/django | 659d2421c7adbbcd205604002d521d82d6b0b465 | [
"BSD-3-Clause",
"0BSD"
] | 12 | 2019-08-10T11:55:29.000Z | 2020-05-21T04:46:30.000Z | tests/forms_tests/field_tests/test_multiplechoicefield.py | jpmallarino/django | 659d2421c7adbbcd205604002d521d82d6b0b465 | [
"BSD-3-Clause",
"0BSD"
] | 3 | 2019-08-20T13:29:34.000Z | 2020-01-30T22:05:10.000Z |
from django.core.exceptions import ValidationError
from django.forms import MultipleChoiceField
from django.test import SimpleTestCase
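

# Covers cleaning for required and optional fields, grouped choices, and has_changed() behaviour.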
class MultipleChoiceFieldTest(SimpleTestCase):
    def test_multiplechoicefield_1(self):
        f = MultipleChoiceField(choices=[("1", "One"), ("2", "Two")])
        with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
            f.clean("")
        with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
            f.clean(None)
        self.assertEqual(["1"], f.clean([1]))
        self.assertEqual(["1"], f.clean(["1"]))
        self.assertEqual(["1", "2"], f.clean(["1", "2"]))
        self.assertEqual(["1", "2"], f.clean([1, "2"]))
        self.assertEqual(["1", "2"], f.clean((1, "2")))
        with self.assertRaisesMessage(ValidationError, "'Enter a list of values.'"):
            f.clean("hello")
        with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
            f.clean([])
        with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
            f.clean(())
        msg = "'Select a valid choice. 3 is not one of the available choices.'"
        with self.assertRaisesMessage(ValidationError, msg):
            f.clean(["3"])

    def test_multiplechoicefield_2(self):
        f = MultipleChoiceField(choices=[("1", "One"), ("2", "Two")], required=False)
        self.assertEqual([], f.clean(""))
        self.assertEqual([], f.clean(None))
        self.assertEqual(["1"], f.clean([1]))
        self.assertEqual(["1"], f.clean(["1"]))
        self.assertEqual(["1", "2"], f.clean(["1", "2"]))
        self.assertEqual(["1", "2"], f.clean([1, "2"]))
        self.assertEqual(["1", "2"], f.clean((1, "2")))
        with self.assertRaisesMessage(ValidationError, "'Enter a list of values.'"):
            f.clean("hello")
        self.assertEqual([], f.clean([]))
        self.assertEqual([], f.clean(()))
        msg = "'Select a valid choice. 3 is not one of the available choices.'"
        with self.assertRaisesMessage(ValidationError, msg):
            f.clean(["3"])

    def test_multiplechoicefield_3(self):
        f = MultipleChoiceField(
            choices=[
                ("Numbers", (("1", "One"), ("2", "Two"))),
                ("Letters", (("3", "A"), ("4", "B"))),
                ("5", "Other"),
            ]
        )
        self.assertEqual(["1"], f.clean([1]))
        self.assertEqual(["1"], f.clean(["1"]))
        self.assertEqual(["1", "5"], f.clean([1, 5]))
        self.assertEqual(["1", "5"], f.clean([1, "5"]))
        self.assertEqual(["1", "5"], f.clean(["1", 5]))
        self.assertEqual(["1", "5"], f.clean(["1", "5"]))
        msg = "'Select a valid choice. 6 is not one of the available choices.'"
        with self.assertRaisesMessage(ValidationError, msg):
            f.clean(["6"])
        msg = "'Select a valid choice. 6 is not one of the available choices.'"
        with self.assertRaisesMessage(ValidationError, msg):
            f.clean(["1", "6"])

    def test_multiplechoicefield_changed(self):
        f = MultipleChoiceField(choices=[("1", "One"), ("2", "Two"), ("3", "Three")])
        self.assertFalse(f.has_changed(None, None))
        self.assertFalse(f.has_changed([], None))
        self.assertTrue(f.has_changed(None, ["1"]))
        self.assertFalse(f.has_changed([1, 2], ["1", "2"]))
        self.assertFalse(f.has_changed([2, 1], ["1", "2"]))
        self.assertTrue(f.has_changed([1, 2], ["1"]))
        self.assertTrue(f.has_changed([1, 2], ["1", "3"]))

    def test_disabled_has_changed(self):
        f = MultipleChoiceField(choices=[("1", "One"), ("2", "Two")], disabled=True)
        self.assertIs(f.has_changed("x", "y"), False)
| 47.531646 | 85 | 0.562716 | 440 | 3,755 | 4.759091 | 0.138636 | 0.08596 | 0.056829 | 0.200573 | 0.820439 | 0.788921 | 0.753582 | 0.753582 | 0.655205 | 0.61127 | 0 | 0.032616 | 0.23249 | 3,755 | 78 | 86 | 48.141026 | 0.693963 | 0 | 0 | 0.416667 | 0 | 0 | 0.143276 | 0 | 0 | 0 | 0 | 0 | 0.527778 | 1 | 0.069444 | false | 0 | 0.041667 | 0 | 0.125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b8ad3f0531397e333064adafcb4899638e250b13 | 10,025 | py | Python | onnx/backend/test/case/node/sequence_map.py | wschin/onnx | fb1f72159f4030ce45503279e6f6b8a7fb103e0f | [
"MIT"
] | null | null | null | onnx/backend/test/case/node/sequence_map.py | wschin/onnx | fb1f72159f4030ce45503279e6f6b8a7fb103e0f | [
"MIT"
] | null | null | null | onnx/backend/test/case/node/sequence_map.py | wschin/onnx | fb1f72159f4030ce45503279e6f6b8a7fb103e0f | [
"MIT"
] | 1 | 2022-03-27T19:17:02.000Z | 2022-03-27T19:17:02.000Z |
# SPDX-License-Identifier: Apache-2.0
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np # type: ignore
import onnx
from ..base import Base
from . import expect
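

# Each exporter below builds a SequenceMap node whose body graph applies a simple
# op (Identity, Add, or Shape) elementwise over the input sequence(s).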
class SequenceMap(Base):

    @staticmethod
    def export_sequence_map_identity_1_sequence():  # type: () -> None
        body = onnx.helper.make_graph(
            [onnx.helper.make_node('Identity', ['in0'], ['out0'])],
            'seq_map_body',
            [onnx.helper.make_tensor_value_info(
                'in0', onnx.TensorProto.FLOAT, ['N'])],
            [onnx.helper.make_tensor_value_info(
                'out0', onnx.TensorProto.FLOAT, ['M'])]
        )
        node = onnx.helper.make_node(
            'SequenceMap',
            inputs=['x'],
            outputs=['y'],
            body=body
        )
        x = [np.random.uniform(0.0, 1.0, 10).astype(np.float32)
             for _ in range(3)]
        y = x
        input_type_protos = [
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['N'])),
        ]
        output_type_protos = [
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['N'])),
        ]
        expect(node, inputs=[x], outputs=[y],
               input_type_protos=input_type_protos,
               output_type_protos=output_type_protos,
               name='test_sequence_map_identity_1_sequence')

    @staticmethod
    def export_sequence_map_identity_2_sequences():  # type: () -> None
        body = onnx.helper.make_graph(
            [onnx.helper.make_node('Identity', ['in0'], ['out0']),
             onnx.helper.make_node('Identity', ['in1'], ['out1'])],
            'seq_map_body',
            [onnx.helper.make_tensor_value_info('in0', onnx.TensorProto.FLOAT, ['N']),
             onnx.helper.make_tensor_value_info('in1', onnx.TensorProto.FLOAT, ['M'])],
            [onnx.helper.make_tensor_value_info('out0', onnx.TensorProto.FLOAT, ['N']),
             onnx.helper.make_tensor_value_info('out1', onnx.TensorProto.FLOAT, ['M'])]
        )
        node = onnx.helper.make_node(
            'SequenceMap',
            inputs=['x0', 'x1'],
            outputs=['y0', 'y1'],
            body=body
        )
        x0 = [np.random.uniform(0.0, 1.0, np.random.randint(
            1, 10)).astype(np.float32) for _ in range(3)]
        x1 = [np.random.uniform(0.0, 1.0, np.random.randint(
            1, 10)).astype(np.float32) for _ in range(3)]
        y0 = x0
        y1 = x1
        input_type_protos = [
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['N'])),
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['M'])),
        ]
        output_type_protos = [
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['N'])),
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['M'])),
        ]
        expect(node, inputs=[x0, x1], outputs=[y0, y1],
               input_type_protos=input_type_protos,
               output_type_protos=output_type_protos,
               name='test_sequence_map_identity_2_sequences')

    @staticmethod
    def export_sequence_map_identity_1_sequence_1_tensor():  # type: () -> None
        body = onnx.helper.make_graph(
            [onnx.helper.make_node('Identity', ['in0'], ['out0']),
             onnx.helper.make_node('Identity', ['in1'], ['out1'])],
            'seq_map_body',
            [onnx.helper.make_tensor_value_info('in0', onnx.TensorProto.FLOAT, ['N']),
             onnx.helper.make_tensor_value_info('in1', onnx.TensorProto.FLOAT, ['M'])],
            [onnx.helper.make_tensor_value_info(
                'out0', onnx.TensorProto.FLOAT, ['N']),
             onnx.helper.make_tensor_value_info(
                 'out1', onnx.TensorProto.FLOAT, ['M'])]
        )
        node = onnx.helper.make_node(
            'SequenceMap',
            inputs=['x0', 'x1'],
            outputs=['y0', 'y1'],
            body=body
        )
        x0 = [np.random.uniform(0.0, 1.0, np.random.randint(
            1, 10)).astype(np.float32) for _ in range(3)]
        x1 = np.random.uniform(0.0, 1.0, np.random.randint(
            1, 10)).astype(np.float32)
        y0 = x0
        y1 = [x1 for _ in range(3)]
        input_type_protos = [
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['N'])),
            onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['M']),
        ]
        output_type_protos = [
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['N'])),
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['M'])),
        ]
        expect(node, inputs=[x0, x1], outputs=[y0, y1],
               input_type_protos=input_type_protos,
               output_type_protos=output_type_protos,
               name='test_sequence_map_identity_1_sequence_1_tensor')

    @staticmethod
    def export_sequence_map_add_2_sequences():  # type: () -> None
        body = onnx.helper.make_graph(
            [onnx.helper.make_node('Add', ['in0', 'in1'], ['out0'])],
            'seq_map_body',
            [onnx.helper.make_tensor_value_info('in0', onnx.TensorProto.FLOAT, ['N']),
             onnx.helper.make_tensor_value_info('in1', onnx.TensorProto.FLOAT, ['N'])],
            [onnx.helper.make_tensor_value_info(
                'out0', onnx.TensorProto.FLOAT, ['N'])]
        )
        node = onnx.helper.make_node(
            'SequenceMap',
            inputs=['x0', 'x1'],
            outputs=['y0'],
            body=body
        )
        N = [np.random.randint(1, 10) for _ in range(3)]
        x0 = [np.random.uniform(0.0, 1.0, N[k]).astype(np.float32)
              for k in range(3)]
        x1 = [np.random.uniform(0.0, 1.0, N[k]).astype(np.float32)
              for k in range(3)]
        y0 = [x0[k] + x1[k] for k in range(3)]
        input_type_protos = [
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['N'])),
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['N'])),
        ]
        output_type_protos = [
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['N'])),
        ]
        expect(node, inputs=[x0, x1], outputs=[y0],
               input_type_protos=input_type_protos,
               output_type_protos=output_type_protos,
               name='test_sequence_map_add_2_sequences')

    @staticmethod
    def export_sequence_map_add_1_sequence_1_tensor():  # type: () -> None
        body = onnx.helper.make_graph(
            [onnx.helper.make_node('Add', ['in0', 'in1'], ['out0'])],
            'seq_map_body',
            [onnx.helper.make_tensor_value_info('in0', onnx.TensorProto.FLOAT, ['N']),
             onnx.helper.make_tensor_value_info('in1', onnx.TensorProto.FLOAT, ['N'])],
            [onnx.helper.make_tensor_value_info(
                'out0', onnx.TensorProto.FLOAT, ['N'])]
        )
        node = onnx.helper.make_node(
            'SequenceMap',
            inputs=['x0', 'x1'],
            outputs=['y0'],
            body=body
        )
        x0 = [np.random.uniform(0.0, 1.0, 10).astype(np.float32) for k in range(3)]
        x1 = np.random.uniform(0.0, 1.0, 10).astype(np.float32)
        y0 = [x0[i] + x1 for i in range(3)]
        input_type_protos = [
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['N'])),
            onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['N']),
        ]
        output_type_protos = [
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['N'])),
        ]
        expect(node, inputs=[x0, x1], outputs=[y0],
               input_type_protos=input_type_protos,
               output_type_protos=output_type_protos,
               name='test_sequence_map_add_1_sequence_1_tensor')

    @staticmethod
    def export_sequence_map_extract_shapes():  # type: () -> None
        body = onnx.helper.make_graph(
            [onnx.helper.make_node('Shape', ['x'], ['shape'])],
            'seq_map_body',
            [onnx.helper.make_tensor_value_info('x', onnx.TensorProto.FLOAT, ['H', 'W', 'C'])],
            [onnx.helper.make_tensor_value_info('shape', onnx.TensorProto.INT64, [3])]
        )
        node = onnx.helper.make_node(
            'SequenceMap',
            inputs=['in_seq'],
            outputs=['shapes'],
            body=body
        )
        shapes = [
            np.array([40, 30, 3], dtype=np.int64),
            np.array([20, 10, 3], dtype=np.int64),
            np.array([10, 5, 3], dtype=np.int64),
        ]
        x0 = [np.zeros(shape, dtype=np.float32) for shape in shapes]
        input_type_protos = [
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.FLOAT, ['H', 'W', 'C'])),
        ]
        output_type_protos = [
            onnx.helper.make_sequence_type_proto(
                onnx.helper.make_tensor_type_proto(onnx.TensorProto.INT64, [3])),
        ]
        expect(node, inputs=[x0], outputs=[shapes],
               input_type_protos=input_type_protos,
               output_type_protos=output_type_protos,
               name='test_sequence_map_extract_shapes')
| 41.255144 | 95 | 0.574963 | 1,200 | 10,025 | 4.51 | 0.076667 | 0.133038 | 0.186253 | 0.133038 | 0.907058 | 0.901515 | 0.874538 | 0.857169 | 0.838877 | 0.815595 | 0 | 0.029884 | 0.285686 | 10,025 | 242 | 96 | 41.42562 | 0.725876 | 0.014963 | 0 | 0.60274 | 0 | 0 | 0.062639 | 0.023008 | 0 | 0 | 0 | 0 | 0 | 1 | 0.027397 | false | 0 | 0.03653 | 0 | 0.068493 | 0.004566 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b22c47e6112459e432e9ef296484f67046e9bd40 | 35,335 | py | Python | tests/test_centralheatpumpsystem.py | marcelosalles/pyidf | c2f744211572b5e14e29522aac1421ba88addb0e | [
"Apache-2.0"
] | 19 | 2015-12-08T23:33:51.000Z | 2022-01-31T04:41:10.000Z | tests/test_centralheatpumpsystem.py | marcelosalles/pyidf | c2f744211572b5e14e29522aac1421ba88addb0e | [
"Apache-2.0"
] | 2 | 2019-10-04T10:57:00.000Z | 2021-10-01T06:46:17.000Z | tests/test_centralheatpumpsystem.py | marcelosalles/pyidf | c2f744211572b5e14e29522aac1421ba88addb0e | [
"Apache-2.0"
] | 7 | 2015-11-04T02:25:01.000Z | 2021-12-08T03:14:28.000Z |
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.plant_heating_and_cooling_equipment import CentralHeatPumpSystem
log = logging.getLogger(__name__)
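

# Round-trip test: populate every field of a CentralHeatPumpSystem, save it to a
# temporary IDF file, read the file back, and check that each field value survives.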
class TestCentralHeatPumpSystem(unittest.TestCase):

    def setUp(self):
        self.fd, self.path = tempfile.mkstemp()

    def tearDown(self):
        os.remove(self.path)

    def test_create_centralheatpumpsystem(self):
        pyidf.validation_level = ValidationLevel.error
        obj = CentralHeatPumpSystem()
        # alpha
        var_name = "Name"
        obj.name = var_name
        # alpha
        var_control_method = "SmartMixing"
        obj.control_method = var_control_method
        # alpha
        var_cooling_loop_inlet_node_name = "Cooling Loop Inlet Node Name"
        obj.cooling_loop_inlet_node_name = var_cooling_loop_inlet_node_name
        # alpha
        var_cooling_loop_outlet_node_name = "Cooling Loop Outlet Node Name"
        obj.cooling_loop_outlet_node_name = var_cooling_loop_outlet_node_name
        # alpha
        var_source_loop_inlet_node_name = "Source Loop Inlet Node Name"
        obj.source_loop_inlet_node_name = var_source_loop_inlet_node_name
        # alpha
        var_source_loop_outlet_node_name = "Source Loop Outlet Node Name"
        obj.source_loop_outlet_node_name = var_source_loop_outlet_node_name
        # alpha
        var_heating_loop_inlet_node_name = "Heating Loop Inlet Node Name"
        obj.heating_loop_inlet_node_name = var_heating_loop_inlet_node_name
        # alpha
        var_heating_loop_outlet_node_name = "Heating Loop Outlet Node Name"
        obj.heating_loop_outlet_node_name = var_heating_loop_outlet_node_name
        # real
        var_ancillary_power = 0.0
        obj.ancillary_power = var_ancillary_power
        # object-list
        var_ancillary_operation_schedule_name = "object-list|Ancillary Operation Schedule Name"
        obj.ancillary_operation_schedule_name = var_ancillary_operation_schedule_name
        # alpha
        var_chiller_heater_modules_performance_component_object_type_1 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_1 = var_chiller_heater_modules_performance_component_object_type_1
        # object-list
        var_chiller_heater_modules_performance_component_name_1 = "object-list|Chiller Heater Modules Performance Component Name 1"
        obj.chiller_heater_modules_performance_component_name_1 = var_chiller_heater_modules_performance_component_name_1
        # object-list
        var_chiller_heater_modules_control_schedule_name_1 = "object-list|Chiller Heater Modules Control Schedule Name 1"
        obj.chiller_heater_modules_control_schedule_name_1 = var_chiller_heater_modules_control_schedule_name_1
        # integer
        var_number_of_chiller_heater_modules_1 = 1
        obj.number_of_chiller_heater_modules_1 = var_number_of_chiller_heater_modules_1
        # alpha
        var_chiller_heater_modules_performance_component_object_type_2 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_2 = var_chiller_heater_modules_performance_component_object_type_2
        # object-list
        var_chiller_heater_modules_performance_component_name_2 = "object-list|Chiller Heater Modules Performance Component Name 2"
        obj.chiller_heater_modules_performance_component_name_2 = var_chiller_heater_modules_performance_component_name_2
        # object-list
        var_chiller_heater_modules_control_schedule_name_2 = "object-list|Chiller Heater Modules Control Schedule Name 2"
        obj.chiller_heater_modules_control_schedule_name_2 = var_chiller_heater_modules_control_schedule_name_2
        # integer
        var_number_of_chiller_heater_modules_2 = 1
        obj.number_of_chiller_heater_modules_2 = var_number_of_chiller_heater_modules_2
        # alpha
        var_chiller_heater_performance_component_object_type_3 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_performance_component_object_type_3 = var_chiller_heater_performance_component_object_type_3
        # object-list
        var_chiller_heater_performance_component_name_3 = "object-list|Chiller Heater Performance Component Name 3"
        obj.chiller_heater_performance_component_name_3 = var_chiller_heater_performance_component_name_3
        # object-list
        var_chiller_heater_modules_control_schedule_name_3 = "object-list|Chiller Heater Modules Control Schedule Name 3"
        obj.chiller_heater_modules_control_schedule_name_3 = var_chiller_heater_modules_control_schedule_name_3
        # integer
        var_number_of_chiller_heater_modules_3 = 1
        obj.number_of_chiller_heater_modules_3 = var_number_of_chiller_heater_modules_3
        # alpha
        var_chiller_heater_modules_performance_component_object_type_4 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_4 = var_chiller_heater_modules_performance_component_object_type_4
        # object-list
        var_chiller_heater_modules_performance_component_name_4 = "object-list|Chiller Heater Modules Performance Component Name 4"
        obj.chiller_heater_modules_performance_component_name_4 = var_chiller_heater_modules_performance_component_name_4
        # object-list
        var_chiller_heater_modules_control_schedule_name_4 = "object-list|Chiller Heater Modules Control Schedule Name 4"
        obj.chiller_heater_modules_control_schedule_name_4 = var_chiller_heater_modules_control_schedule_name_4
        # integer
        var_number_of_chiller_heater_modules_4 = 1
        obj.number_of_chiller_heater_modules_4 = var_number_of_chiller_heater_modules_4
        # alpha
        var_chiller_heater_modules_performance_component_object_type_5 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_5 = var_chiller_heater_modules_performance_component_object_type_5
        # object-list
        var_chiller_heater_models_performance_component_name_5 = "object-list|Chiller Heater Models Performance Component Name 5"
        obj.chiller_heater_models_performance_component_name_5 = var_chiller_heater_models_performance_component_name_5
        # object-list
        var_chiller_heater_modules_control_schedule_name_5 = "object-list|Chiller Heater Modules Control Schedule Name 5"
        obj.chiller_heater_modules_control_schedule_name_5 = var_chiller_heater_modules_control_schedule_name_5
        # integer
        var_number_of_chiller_heater_modules_5 = 1
        obj.number_of_chiller_heater_modules_5 = var_number_of_chiller_heater_modules_5
        # alpha
        var_chiller_heater_modules_performance_component_object_type_6 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_6 = var_chiller_heater_modules_performance_component_object_type_6
        # object-list
        var_chiller_heater_modules_performance_component_name_6 = "object-list|Chiller Heater Modules Performance Component Name 6"
        obj.chiller_heater_modules_performance_component_name_6 = var_chiller_heater_modules_performance_component_name_6
        # object-list
        var_chiller_heater_modules_control_schedule_name_6 = "object-list|Chiller Heater Modules Control Schedule Name 6"
        obj.chiller_heater_modules_control_schedule_name_6 = var_chiller_heater_modules_control_schedule_name_6
        # integer
        var_number_of_chiller_heater_modules_6 = 1
        obj.number_of_chiller_heater_modules_6 = var_number_of_chiller_heater_modules_6
        # alpha
        var_chiller_heater_modules_performance_component_object_type_7 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_7 = var_chiller_heater_modules_performance_component_object_type_7
        # object-list
        var_chiller_heater_modules_performance_component_name_7 = "object-list|Chiller Heater Modules Performance Component Name 7"
        obj.chiller_heater_modules_performance_component_name_7 = var_chiller_heater_modules_performance_component_name_7
        # object-list
        var_chiller_heater_modules_control_schedule_name_7 = "object-list|Chiller Heater Modules Control Schedule Name 7"
        obj.chiller_heater_modules_control_schedule_name_7 = var_chiller_heater_modules_control_schedule_name_7
        # integer
        var_number_of_chiller_heater_modules_7 = 1
        obj.number_of_chiller_heater_modules_7 = var_number_of_chiller_heater_modules_7
        # alpha
        var_chiller_heater_modules_performance_component_object_type_8 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_8 = var_chiller_heater_modules_performance_component_object_type_8
        # object-list
        var_chiller_heater_modules_performance_component_name_8 = "object-list|Chiller Heater Modules Performance Component Name 8"
        obj.chiller_heater_modules_performance_component_name_8 = var_chiller_heater_modules_performance_component_name_8
        # object-list
        var_chiller_heater_modules_control_schedule_name_8 = "object-list|Chiller Heater Modules Control Schedule Name 8"
        obj.chiller_heater_modules_control_schedule_name_8 = var_chiller_heater_modules_control_schedule_name_8
        # integer
        var_number_of_chiller_heater_modules_8 = 1
        obj.number_of_chiller_heater_modules_8 = var_number_of_chiller_heater_modules_8
        # alpha
        var_chiller_heater_modules_performance_component_object_type_9 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_9 = var_chiller_heater_modules_performance_component_object_type_9
        # object-list
        var_chiller_heater_modules_performance_component_name_9 = "object-list|Chiller Heater Modules Performance Component Name 9"
        obj.chiller_heater_modules_performance_component_name_9 = var_chiller_heater_modules_performance_component_name_9
        # object-list
        var_chiller_heater_modules_control_schedule_name_9 = "object-list|Chiller Heater Modules Control Schedule Name 9"
        obj.chiller_heater_modules_control_schedule_name_9 = var_chiller_heater_modules_control_schedule_name_9
        # integer
        var_number_of_chiller_heater_modules_9 = 1
        obj.number_of_chiller_heater_modules_9 = var_number_of_chiller_heater_modules_9
        # alpha
        var_chiller_heater_modules_performance_component_object_type_10 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_10 = var_chiller_heater_modules_performance_component_object_type_10
        # object-list
        var_chiller_heater_modules_performance_component_name_10 = "object-list|Chiller Heater Modules Performance Component Name 10"
        obj.chiller_heater_modules_performance_component_name_10 = var_chiller_heater_modules_performance_component_name_10
        # object-list
        var_chiller_heater_modules_control_schedule_name_10 = "object-list|Chiller Heater Modules Control Schedule Name 10"
        obj.chiller_heater_modules_control_schedule_name_10 = var_chiller_heater_modules_control_schedule_name_10
        # integer
        var_number_of_chiller_heater_modules_10 = 1
        obj.number_of_chiller_heater_modules_10 = var_number_of_chiller_heater_modules_10
        # alpha
        var_chiller_heater_modules_performance_component_object_type_11 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_11 = var_chiller_heater_modules_performance_component_object_type_11
        # object-list
        var_chiller_heater_modules_performance_component_name_11 = "object-list|Chiller Heater Modules Performance Component Name 11"
        obj.chiller_heater_modules_performance_component_name_11 = var_chiller_heater_modules_performance_component_name_11
        # object-list
        var_chiller_heater_module_control_schedule_name_11 = "object-list|Chiller Heater Module Control Schedule Name 11"
        obj.chiller_heater_module_control_schedule_name_11 = var_chiller_heater_module_control_schedule_name_11
        # integer
        var_number_of_chiller_heater_modules_11 = 1
        obj.number_of_chiller_heater_modules_11 = var_number_of_chiller_heater_modules_11
        # alpha
        var_chiller_heater_modules_performance_component_object_type_12 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_12 = var_chiller_heater_modules_performance_component_object_type_12
        # object-list
        var_chiller_heater_modules_performance_component_name_12 = "object-list|Chiller Heater Modules Performance Component Name 12"
        obj.chiller_heater_modules_performance_component_name_12 = var_chiller_heater_modules_performance_component_name_12
        # object-list
        var_chiller_heater_modules_control_schedule_name_12 = "object-list|Chiller Heater Modules Control Schedule Name 12"
        obj.chiller_heater_modules_control_schedule_name_12 = var_chiller_heater_modules_control_schedule_name_12
        # integer
        var_number_of_chiller_heater_modules_12 = 1
        obj.number_of_chiller_heater_modules_12 = var_number_of_chiller_heater_modules_12
        # alpha
        var_chiller_heater_modules_performance_component_object_type_13 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_13 = var_chiller_heater_modules_performance_component_object_type_13
        # object-list
        var_chiller_heater_modules_performance_component_name_13 = "object-list|Chiller Heater Modules Performance Component Name 13"
        obj.chiller_heater_modules_performance_component_name_13 = var_chiller_heater_modules_performance_component_name_13
        # object-list
        var_chiller_heater_modules_control_schedule_name_13 = "object-list|Chiller Heater Modules Control Schedule Name 13"
        obj.chiller_heater_modules_control_schedule_name_13 = var_chiller_heater_modules_control_schedule_name_13
        # integer
        var_number_of_chiller_heater_modules_13 = 1
        obj.number_of_chiller_heater_modules_13 = var_number_of_chiller_heater_modules_13
        # alpha
        var_chiller_heater_modules_performance_component_object_type_14 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_14 = var_chiller_heater_modules_performance_component_object_type_14
        # object-list
        var_chiller_heater_modules_performance_component_name_14 = "object-list|Chiller Heater Modules Performance Component Name 14"
        obj.chiller_heater_modules_performance_component_name_14 = var_chiller_heater_modules_performance_component_name_14
        # object-list
        var_chiller_heater_modules_control_schedule_name_14 = "object-list|Chiller Heater Modules Control Schedule Name 14"
        obj.chiller_heater_modules_control_schedule_name_14 = var_chiller_heater_modules_control_schedule_name_14
        # integer
        var_number_of_chiller_heater_modules_14 = 1
        obj.number_of_chiller_heater_modules_14 = var_number_of_chiller_heater_modules_14
        # alpha
        var_chiller_heater_modules_performance_component_object_type_15 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_15 = var_chiller_heater_modules_performance_component_object_type_15
        # object-list
        var_chiller_heater_modules_performance_component_name_15 = "object-list|Chiller Heater Modules Performance Component Name 15"
        obj.chiller_heater_modules_performance_component_name_15 = var_chiller_heater_modules_performance_component_name_15
        # object-list
        var_chiller_heater_modules_control_schedule_name_15 = "object-list|Chiller Heater Modules Control Schedule Name 15"
        obj.chiller_heater_modules_control_schedule_name_15 = var_chiller_heater_modules_control_schedule_name_15
        # integer
        var_number_of_chiller_heater_modules_15 = 1
        obj.number_of_chiller_heater_modules_15 = var_number_of_chiller_heater_modules_15
        # alpha
        var_chiller_heater_modules_performance_component_object_type_16 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_16 = var_chiller_heater_modules_performance_component_object_type_16
        # object-list
        var_chiller_heater_modules_performance_component_name_16 = "object-list|Chiller Heater Modules Performance Component Name 16"
        obj.chiller_heater_modules_performance_component_name_16 = var_chiller_heater_modules_performance_component_name_16
        # object-list
        var_chiller_heater_modules_control_schedule_name_16 = "object-list|Chiller Heater Modules Control Schedule Name 16"
        obj.chiller_heater_modules_control_schedule_name_16 = var_chiller_heater_modules_control_schedule_name_16
        # integer
        var_number_of_chiller_heater_modules_16 = 1
        obj.number_of_chiller_heater_modules_16 = var_number_of_chiller_heater_modules_16
        # alpha
        var_chiller_heater_modules_performance_component_object_type_17 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_17 = var_chiller_heater_modules_performance_component_object_type_17
        # object-list
        var_chiller_heater_modules_performance_component_name_17 = "object-list|Chiller Heater Modules Performance Component Name 17"
        obj.chiller_heater_modules_performance_component_name_17 = var_chiller_heater_modules_performance_component_name_17
        # object-list
        var_chiller_heater_modules_control_schedule_name_17 = "object-list|Chiller Heater Modules Control Schedule Name 17"
        obj.chiller_heater_modules_control_schedule_name_17 = var_chiller_heater_modules_control_schedule_name_17
        # integer
        var_number_of_chiller_heater_modules_17 = 1
        obj.number_of_chiller_heater_modules_17 = var_number_of_chiller_heater_modules_17
        # alpha
        var_chiller_heater_modules_performance_component_object_type_18 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_18 = var_chiller_heater_modules_performance_component_object_type_18
        # object-list
        var_chiller_heater_modules_performance_component_name_18 = "object-list|Chiller Heater Modules Performance Component Name 18"
        obj.chiller_heater_modules_performance_component_name_18 = var_chiller_heater_modules_performance_component_name_18
        # object-list
        var_chiller_heater_modules_control_control_schedule_name_18 = "object-list|Chiller Heater Modules Control Control Schedule Name 18"
        obj.chiller_heater_modules_control_control_schedule_name_18 = var_chiller_heater_modules_control_control_schedule_name_18
        # integer
        var_number_of_chiller_heater_modules_18 = 1
        obj.number_of_chiller_heater_modules_18 = var_number_of_chiller_heater_modules_18
        # alpha
        var_chiller_heater_modules_performance_component_object_type_19 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_19 = var_chiller_heater_modules_performance_component_object_type_19
        # object-list
        var_chiller_heater_modules_performance_component_name_19 = "object-list|Chiller Heater Modules Performance Component Name 19"
        obj.chiller_heater_modules_performance_component_name_19 = var_chiller_heater_modules_performance_component_name_19
        # object-list
        var_chiller_heater_modules_control_schedule_name_19 = "object-list|Chiller Heater Modules Control Schedule Name 19"
        obj.chiller_heater_modules_control_schedule_name_19 = var_chiller_heater_modules_control_schedule_name_19
        # integer
        var_number_of_chiller_heater_modules_19 = 1
        obj.number_of_chiller_heater_modules_19 = var_number_of_chiller_heater_modules_19
        # alpha
        var_chiller_heater_modules_performance_component_object_type_20 = "ChillerHeaterPerformance:Electric:EIR"
        obj.chiller_heater_modules_performance_component_object_type_20 = var_chiller_heater_modules_performance_component_object_type_20
        # object-list
        var_chiller_heater_modules_performance_component_name_20 = "object-list|Chiller Heater Modules Performance Component Name 20"
        obj.chiller_heater_modules_performance_component_name_20 = var_chiller_heater_modules_performance_component_name_20
        # object-list
        var_chiller_heater_modules_control_schedule_name_20 = "object-list|Chiller Heater Modules Control Schedule Name 20"
        obj.chiller_heater_modules_control_schedule_name_20 = var_chiller_heater_modules_control_schedule_name_20
        # integer
        var_number_of_chiller_heater_modules_20 = 1
        obj.number_of_chiller_heater_modules_20 = var_number_of_chiller_heater_modules_20

        idf = IDF()
        idf.add(obj)
        idf.save(self.path, check=False)

        with open(self.path, mode='r') as f:
            for line in f:
                log.debug(line.strip())

        idf2 = IDF(self.path)
        self.assertEqual(idf2.centralheatpumpsystems[0].name, var_name)
        self.assertEqual(idf2.centralheatpumpsystems[0].control_method, var_control_method)
        self.assertEqual(idf2.centralheatpumpsystems[0].cooling_loop_inlet_node_name, var_cooling_loop_inlet_node_name)
        self.assertEqual(idf2.centralheatpumpsystems[0].cooling_loop_outlet_node_name, var_cooling_loop_outlet_node_name)
        self.assertEqual(idf2.centralheatpumpsystems[0].source_loop_inlet_node_name, var_source_loop_inlet_node_name)
        self.assertEqual(idf2.centralheatpumpsystems[0].source_loop_outlet_node_name, var_source_loop_outlet_node_name)
        self.assertEqual(idf2.centralheatpumpsystems[0].heating_loop_inlet_node_name, var_heating_loop_inlet_node_name)
        self.assertEqual(idf2.centralheatpumpsystems[0].heating_loop_outlet_node_name, var_heating_loop_outlet_node_name)
        self.assertAlmostEqual(idf2.centralheatpumpsystems[0].ancillary_power, var_ancillary_power)
        self.assertEqual(idf2.centralheatpumpsystems[0].ancillary_operation_schedule_name, var_ancillary_operation_schedule_name)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_1, var_chiller_heater_modules_performance_component_object_type_1)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_1, var_chiller_heater_modules_performance_component_name_1)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_1, var_chiller_heater_modules_control_schedule_name_1)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_1, var_number_of_chiller_heater_modules_1)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_2, var_chiller_heater_modules_performance_component_object_type_2)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_2, var_chiller_heater_modules_performance_component_name_2)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_2, var_chiller_heater_modules_control_schedule_name_2)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_2, var_number_of_chiller_heater_modules_2)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_performance_component_object_type_3, var_chiller_heater_performance_component_object_type_3)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_performance_component_name_3, var_chiller_heater_performance_component_name_3)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_3, var_chiller_heater_modules_control_schedule_name_3)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_3, var_number_of_chiller_heater_modules_3)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_4, var_chiller_heater_modules_performance_component_object_type_4)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_4, var_chiller_heater_modules_performance_component_name_4)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_4, var_chiller_heater_modules_control_schedule_name_4)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_4, var_number_of_chiller_heater_modules_4)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_5, var_chiller_heater_modules_performance_component_object_type_5)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_models_performance_component_name_5, var_chiller_heater_models_performance_component_name_5)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_5, var_chiller_heater_modules_control_schedule_name_5)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_5, var_number_of_chiller_heater_modules_5)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_6, var_chiller_heater_modules_performance_component_object_type_6)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_6, var_chiller_heater_modules_performance_component_name_6)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_6, var_chiller_heater_modules_control_schedule_name_6)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_6, var_number_of_chiller_heater_modules_6)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_7, var_chiller_heater_modules_performance_component_object_type_7)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_7, var_chiller_heater_modules_performance_component_name_7)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_7, var_chiller_heater_modules_control_schedule_name_7)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_7, var_number_of_chiller_heater_modules_7)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_8, var_chiller_heater_modules_performance_component_object_type_8)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_8, var_chiller_heater_modules_performance_component_name_8)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_8, var_chiller_heater_modules_control_schedule_name_8)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_8, var_number_of_chiller_heater_modules_8)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_9, var_chiller_heater_modules_performance_component_object_type_9)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_9, var_chiller_heater_modules_performance_component_name_9)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_9, var_chiller_heater_modules_control_schedule_name_9)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_9, var_number_of_chiller_heater_modules_9)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_10, var_chiller_heater_modules_performance_component_object_type_10)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_10, var_chiller_heater_modules_performance_component_name_10)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_10, var_chiller_heater_modules_control_schedule_name_10)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_10, var_number_of_chiller_heater_modules_10)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_11, var_chiller_heater_modules_performance_component_object_type_11)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_11, var_chiller_heater_modules_performance_component_name_11)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_module_control_schedule_name_11, var_chiller_heater_module_control_schedule_name_11)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_11, var_number_of_chiller_heater_modules_11)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_12, var_chiller_heater_modules_performance_component_object_type_12)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_12, var_chiller_heater_modules_performance_component_name_12)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_12, var_chiller_heater_modules_control_schedule_name_12)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_12, var_number_of_chiller_heater_modules_12)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_13, var_chiller_heater_modules_performance_component_object_type_13)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_13, var_chiller_heater_modules_performance_component_name_13)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_13, var_chiller_heater_modules_control_schedule_name_13)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_13, var_number_of_chiller_heater_modules_13)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_14, var_chiller_heater_modules_performance_component_object_type_14)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_14, var_chiller_heater_modules_performance_component_name_14)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_14, var_chiller_heater_modules_control_schedule_name_14)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_14, var_number_of_chiller_heater_modules_14)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_15, var_chiller_heater_modules_performance_component_object_type_15)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_15, var_chiller_heater_modules_performance_component_name_15)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_15, var_chiller_heater_modules_control_schedule_name_15)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_15, var_number_of_chiller_heater_modules_15)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_16, var_chiller_heater_modules_performance_component_object_type_16)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_16, var_chiller_heater_modules_performance_component_name_16)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_16, var_chiller_heater_modules_control_schedule_name_16)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_16, var_number_of_chiller_heater_modules_16)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_17, var_chiller_heater_modules_performance_component_object_type_17)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_17, var_chiller_heater_modules_performance_component_name_17)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_17, var_chiller_heater_modules_control_schedule_name_17)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_17, var_number_of_chiller_heater_modules_17)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_18, var_chiller_heater_modules_performance_component_object_type_18)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_18, var_chiller_heater_modules_performance_component_name_18)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_control_schedule_name_18, var_chiller_heater_modules_control_control_schedule_name_18)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_18, var_number_of_chiller_heater_modules_18)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_19, var_chiller_heater_modules_performance_component_object_type_19)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_19, var_chiller_heater_modules_performance_component_name_19)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_19, var_chiller_heater_modules_control_schedule_name_19)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_19, var_number_of_chiller_heater_modules_19)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_object_type_20, var_chiller_heater_modules_performance_component_object_type_20)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_performance_component_name_20, var_chiller_heater_modules_performance_component_name_20)
        self.assertEqual(idf2.centralheatpumpsystems[0].chiller_heater_modules_control_schedule_name_20, var_chiller_heater_modules_control_schedule_name_20)
        self.assertEqual(idf2.centralheatpumpsystems[0].number_of_chiller_heater_modules_20, var_number_of_chiller_heater_modules_20)
| 89.682741 | 181 | 0.839253 | 4,506 | 35,335 | 5.955171 | 0.023524 | 0.213162 | 0.3108 | 0.234516 | 0.972721 | 0.95733 | 0.944548 | 0.900909 | 0.829246 | 0.730789 | 0 | 0.028453 | 0.11974 | 35,335 | 394 | 182 | 89.682741 | 0.834266 | 0.02332 | 0 | 0 | 0 | 0 | 0.099067 | 0.022109 | 0 | 0 | 0 | 0 | 0.306122 | 1 | 0.010204 | false | 0 | 0.027211 | 0 | 0.040816 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
b26217c03d1f8399d5008aedd75d0db8403e0afa | 571 | py | Python | momsfab/doc_events/purchase_invoice.py | Amruthaohm/momsfab | 1e99d6dd19fea194c683eb63e44188f7329312d1 | [
"MIT"
] | null | null | null | momsfab/doc_events/purchase_invoice.py | Amruthaohm/momsfab | 1e99d6dd19fea194c683eb63e44188f7329312d1 | [
"MIT"
] | null | null | null | momsfab/doc_events/purchase_invoice.py | Amruthaohm/momsfab | 1e99d6dd19fea194c683eb63e44188f7329312d1 | [
"MIT"
] | 3 | 2022-03-30T04:00:34.000Z | 2022-03-30T04:11:44.000Z |
import frappe


def on_submit_pi(doc, method):
    pass
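    # NOTE: the commented-out block below (marking linked Budget BOMs as
    # "Completed" on purchase invoice submit) is disabled; kept for reference.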
    # for i in doc.budget_bom_reference:
    #     if i.budget_bom:
    #         frappe.db.sql(""" UPDATE `tabBudget BOM` SET status=%s WHERE name=%s """,
    #                       ("Completed", i.budget_bom))
    #         frappe.db.commit()


def on_submit_si(doc, method):
    for i in doc.budget_bom_reference:
        if i.budget_bom:
            frappe.db.sql(""" UPDATE `tabBudget BOM` SET status=%s WHERE name=%s """,
                          ("Completed", i.budget_bom))
            frappe.db.commit()
| 33.588235 | 88 | 0.549912 | 75 | 571 | 4.026667 | 0.373333 | 0.178808 | 0.13245 | 0.211921 | 0.801325 | 0.801325 | 0.801325 | 0.801325 | 0.801325 | 0.801325 | 0 | 0 | 0.318739 | 571 | 17 | 89 | 33.588235 | 0.77635 | 0.378284 | 0 | 0 | 0 | 0 | 0.177143 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0.111111 | 0.111111 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 9 |
b289813e9fcc84b6f96635c6d599691ad89a9505 | 73 | py | Python | rsocket/logger.py | Precognize/rsocket-py | 31704d53c232e0c0f53783b9a56117e5bd0645ce | [
"MIT"
] | null | null | null | rsocket/logger.py | Precognize/rsocket-py | 31704d53c232e0c0f53783b9a56117e5bd0645ce | [
"MIT"
] | null | null | null | rsocket/logger.py | Precognize/rsocket-py | 31704d53c232e0c0f53783b9a56117e5bd0645ce | [
"MIT"
] | null | null | null |
import logging
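

# Convenience accessor for the shared 'pyrsocket' logger.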
def logger():
    return logging.getLogger('pyrsocket')
| 12.166667 | 41 | 0.726027 | 8 | 73 | 6.625 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.164384 | 73 | 5 | 42 | 14.6 | 0.868852 | 0 | 0 | 0 | 0 | 0 | 0.123288 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
b293e307b8c3da46935a6712f19c06597dd3dcf1 | 65,734 | py | Python | data_facility_admin/migrations/0001_initial.py | NYU-CI/dfadmin | 071f38c62aea8ef8bf4ae82dbd672694e719b9bf | [
"CC0-1.0"
] | 1 | 2021-04-08T05:22:35.000Z | 2021-04-08T05:22:35.000Z | data_facility_admin/migrations/0001_initial.py | NYU-CI/dfadmin | 071f38c62aea8ef8bf4ae82dbd672694e719b9bf | [
"CC0-1.0"
] | 8 | 2019-08-05T18:16:07.000Z | 2019-10-29T18:42:53.000Z | data_facility_admin/migrations/0001_initial.py | NYU-CI/dfadmin | 071f38c62aea8ef8bf4ae82dbd672694e719b9bf | [
"CC0-1.0"
] | 2 | 2019-09-11T15:24:32.000Z | 2020-01-08T20:34:05.000Z |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2018-02-16 02:07
from __future__ import unicode_literals
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
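

# Initial schema for data_facility_admin: data agreements and signatures, data
# providers, dataset access grants, data stewards, and their historical (audit) tables.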
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='DataAgreement',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=256)),
('text', models.TextField(blank=True, max_length=256)),
('document', models.FileField(blank=True, max_length=256, upload_to=b'file_upload/data_agreements')),
('delete_on_expiration', models.BooleanField(default=False)),
('expiration_date', models.DateField(blank=True, default=None)),
('deletion_method', models.CharField(blank=True, max_length=256)),
('version', models.CharField(blank=True, max_length=256)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['dataset', 'version', 'title'],
},
),
migrations.CreateModel(
name='DataAgreementSignature',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('accepted', models.BooleanField()),
('date', models.DateField(auto_now_add=True)),
('status', models.CharField(choices=[(b'To sign', b'To sign'), (b'Signed', b'Signed')], default=b'To sign', max_length=256)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('data_agreement', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_facility_admin.DataAgreement')),
],
options={
'ordering': ['user', 'data_agreement'],
},
),
migrations.CreateModel(
name='DataProvider',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=256)),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='DatasetAccess',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('request_id', models.IntegerField(blank=True, default=None, help_text=b'Id for from the ticketing system, so it is possible to trace back if more info is needed.', null=True)),
('requested_at', models.DateTimeField(blank=True, null=True)),
('reviewed_at', models.DateTimeField(blank=True, null=True)),
('granted_at', models.DateTimeField(blank=True, null=True)),
('expire_at', models.DateTimeField(blank=True, null=True)),
('motivation', models.TextField(blank=True, help_text=b'The reason of this request.', max_length=256, null=True)),
('load_to_database', models.BooleanField(default=False)),
('database_name', models.CharField(blank=True, max_length=256)),
('schema', models.CharField(blank=True, max_length=256)),
('database_status', models.CharField(choices=[(b'Requested', b'Requested'), (b'Loaded', b'Loaded')], default=b'Requested', max_length=256)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['project', 'dataset_id'],
},
),
migrations.CreateModel(
name='DataSteward',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('start_date', models.DateField()),
('end_date', models.DateField(blank=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['dataset', 'user', 'start_date', 'end_date'],
},
),
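# The Historical* models below mirror their concrete counterparts, but each one
# demotes 'id' to a plain indexed IntegerField and adds history bookkeeping
# columns: history_id is the real primary key, alongside history_date,
# history_change_reason and history_type. Foreign keys on historical rows use
# on_delete=DO_NOTHING with db_constraint=False so history survives deletions.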
migrations.CreateModel(
name='HistoricalDataAgreement',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('title', models.CharField(max_length=256)),
('text', models.TextField(blank=True, max_length=256)),
('document', models.TextField(blank=True, max_length=256)),
('delete_on_expiration', models.BooleanField(default=False)),
('expiration_date', models.DateField(blank=True, default=None, null=True)),
('deletion_method', models.CharField(blank=True, max_length=256)),
('version', models.CharField(blank=True, max_length=256)),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical data agreement',
},
),
migrations.CreateModel(
name='HistoricalDataAgreementSignature',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('accepted', models.BooleanField()),
('date', models.DateField(blank=True, editable=False)),
('status', models.CharField(choices=[(b'To sign', b'To sign'), (b'Signed', b'Signed')], default=b'To sign', max_length=256)),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('data_agreement', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.DataAgreement')),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical data agreement signature',
},
),
migrations.CreateModel(
name='HistoricalDataset',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('ldap_id', models.IntegerField(blank=True, db_index=True, editable=False, help_text=b"This is internal LDAP information. Don't change this value unless you know what you're doing.", null=True)),
('ldap_name', models.CharField(blank=True, db_index=True, max_length=256, null=True, validators=[django.core.validators.RegexValidator(regex=b'^[a-z_-]{5,}$')])),
('public', models.BooleanField(default=False, help_text=b'Check this if everyone should have access to this dataset.')),
('doi', models.CharField(db_index=True, max_length=256)),
('name', models.CharField(max_length=256)),
('version', models.CharField(blank=True, default=b'1', max_length=256, null=True)),
('storage_location', models.CharField(blank=True, help_text=b'Location of this dataset (Optional if dataset is green).', max_length=256, null=True)),
('vault_volume', models.CharField(blank=True, help_text=b'Volume on DF Vault. For non green datasets only. This information is internal.', max_length=256, null=True)),
('needs_review', models.BooleanField(default=False)),
('shareable', models.BooleanField(default=True, help_text=b'Indicates if this dataset can be shared with other users.')),
('last_reported_at', models.DateField(blank=True, null=True)),
('expiration', models.DateField(blank=True, null=True)),
('data_classification', models.CharField(choices=[(b'Green', b'Green'), (b'Restricted Green', b'Restricted Green'), (b'Yellow', b'Yellow'), (b'Red', b'Red')], default=b'Green', max_length=256)),
('report_frequency', models.CharField(choices=[(b'No Reporting Needed', b'No Reporting Needed'), (b'Quarterly', b'Quarterly'), (b'Biannual', b'Biannual'), (b'Annual', b'Annual')], default=b'No Reporting Needed', max_length=256)),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('data_provider', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.DataProvider')),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical dataset',
},
),
migrations.CreateModel(
name='HistoricalDatasetAccess',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('request_id', models.IntegerField(blank=True, default=None, help_text=b'Id from the ticketing system, so it is possible to trace back if more info is needed.', null=True)),
('requested_at', models.DateTimeField(blank=True, null=True)),
('reviewed_at', models.DateTimeField(blank=True, null=True)),
('granted_at', models.DateTimeField(blank=True, null=True)),
('expire_at', models.DateTimeField(blank=True, null=True)),
('motivation', models.TextField(blank=True, help_text=b'The reason for this request.', max_length=256, null=True)),
('load_to_database', models.BooleanField(default=False)),
('database_name', models.CharField(blank=True, max_length=256)),
('schema', models.CharField(blank=True, max_length=256)),
('database_status', models.CharField(choices=[(b'Requested', b'Requested'), (b'Loaded', b'Loaded')], default=b'Requested', max_length=256)),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical dataset access',
},
),
migrations.CreateModel(
name='HistoricalDataSteward',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('start_date', models.DateField()),
('end_date', models.DateField(blank=True)),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical data steward',
},
),
migrations.CreateModel(
name='HistoricalDfRole',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('ldap_id', models.IntegerField(blank=True, db_index=True, editable=False, help_text=b"This is internal LDAP information. Don't change this value unless you know what you're doing.", null=True)),
('ldap_name', models.CharField(blank=True, db_index=True, max_length=256, null=True, validators=[django.core.validators.RegexValidator(regex=b'^[a-z_-]{5,}$')])),
('name', models.CharField(db_index=True, max_length=256)),
('description', models.CharField(blank=True, max_length=256, null=True)),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical Data Facility Role',
},
),
migrations.CreateModel(
name='HistoricalProfileTag',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('text', models.CharField(db_index=True, max_length=256)),
('description', models.CharField(blank=True, max_length=256)),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical profile tag',
},
),
migrations.CreateModel(
name='HistoricalProject',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('ldap_id', models.IntegerField(blank=True, db_index=True, editable=False, help_text=b"This is internal LDAP information. Don't change this value unless you know what you're doing.", null=True)),
('ldap_name', models.CharField(blank=True, db_index=True, max_length=256, null=True, validators=[django.core.validators.RegexValidator(regex=b'^[a-z_-]{5,}$')])),
('has_irb', models.BooleanField(default=False)),
('name', models.CharField(max_length=256)),
('abstract', models.TextField(max_length=256)),
('methodology', models.TextField(blank=True, max_length=256)),
('expected_outcomes', models.TextField(blank=True, max_length=256)),
('status', models.CharField(choices=[(b'Pending Approval', b'Pending Approval'), (b'Active', b'Active'), (b'Archived', b'Archived')], default=b'Pending Approval', max_length=20)),
('type', models.CharField(choices=[(b'Capstone', b'Capstone'), (b'Class', b'Class'), (b'Research', b'Research')], default=b'Research', max_length=10)),
('environment', models.CharField(blank=True, choices=[(b'Green', b'Green'), (b'Yellow', b'Yellow'), (b'Red', b'Red')], default=b'Green', max_length=10)),
('request_id', models.IntegerField(blank=True, default=None, help_text=b'Id from the ticketing system, so it is possible to trace back if more info is needed.', null=True)),
('workspace_path', models.CharField(blank=True, max_length=256)),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical project',
},
),
migrations.CreateModel(
name='HistoricalProjectMember',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('request_id', models.IntegerField(blank=True, default=None, help_text=b'Id from the ticketing system (if not the same as the project creation ticket), so it is possible to trace back if more info is needed.', null=True)),
('start_date', models.DateTimeField(blank=True, null=True)),
('end_date', models.DateTimeField(blank=True, null=True)),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical project member',
},
),
migrations.CreateModel(
name='HistoricalProjectRole',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('name', models.CharField(db_index=True, help_text=b'This is a friendly name, such as Student (Reader) or Instructor (Admin).', max_length=256)),
('description', models.TextField(blank=True, max_length=256)),
('system_role', models.CharField(choices=[(b'Reader', b'Reader'), (b'Writer', b'Writer'), (b'Admin', b'Admin')], default=b'Admin', max_length=10)),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical project role',
},
),
migrations.CreateModel(
name='HistoricalProjectTool',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('additional_info', models.CharField(blank=True, help_text=b'Additional info, such as a database name, in case it does not follow the Data Facility convention.', max_length=256)),
('request_id', models.IntegerField(blank=True, default=None, help_text=b'Id from the ticketing system (if not the same as the project creation ticket), so it is possible to trace back if more info is needed.', null=True)),
('tool_name', models.CharField(choices=[(b'Git Lab', b'Git Lab'), (b'Postgres', b'Postgres'), (b'Oracle', b'Oracle'), (b'POSIX', b'POSIX'), (b'Other', b'Other')], default=b'Postgres', max_length=10)),
('other_name', models.CharField(blank=True, help_text=b'Specify the tool name if Other is selected.', max_length=256)),
('status', models.CharField(choices=[(b'Active', b'Active'), (b'Deactivated', b'Deactivated')], default=b'Active', max_length=256)),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical project tool',
},
),
migrations.CreateModel(
name='HistoricalSignedTermsOfUse',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('signed_at', models.DateField()),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical signed terms of use',
},
),
migrations.CreateModel(
name='HistoricalTermsOfUse',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('text', models.CharField(db_index=True, max_length=256)),
('version', models.IntegerField(blank=True)),
('release_date', models.DateField(blank=True, editable=False)),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical terms of use',
},
),
migrations.CreateModel(
name='HistoricalTraining',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('name', models.CharField(max_length=256)),
('url', models.URLField(blank=True, help_text=b'Training website, if any.', max_length=256, null=True)),
('description', models.TextField(blank=True, max_length=256, null=True)),
('date', models.DateTimeField(blank=True, null=True)),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical training',
},
),
migrations.CreateModel(
name='HistoricalUser',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('ldap_id', models.IntegerField(blank=True, db_index=True, editable=False, help_text=b"This is internal LDAP information. Don't change this value unless you know what you're doing.", null=True)),
('ldap_name', models.CharField(blank=True, db_index=True, max_length=256, null=True, validators=[django.core.validators.RegexValidator(regex=b'^[a-z_-]{5,}$')])),
('first_name', models.CharField(max_length=256)),
('last_name', models.CharField(max_length=256)),
('orc_id', models.CharField(blank=True, max_length=256)),
('affiliation', models.CharField(blank=True, max_length=256)),
('email', models.EmailField(db_index=True, max_length=254)),
('job_title', models.CharField(blank=True, max_length=256)),
('sponsor', models.CharField(blank=True, max_length=256)),
('status', models.CharField(choices=[(b'Pending Approval', b'Pending Approval'), (b'New', b'New'), (b'Active', b'Active'), (b'Locked', b'Locked'), (b'Locked by too many failed auth attempts', b'Locked by too many failed auth attempts (AUTO)'), (b'Locked by inactivity', b'Locked by inactivity (AUTO)'), (b'Unlocked', b'Unlocked'), (b'Disabled', b'Disabled')], default=b'New', help_text=b'\n <b>Pending Approval</b>: Accounts on this status will not be created on the system. <br/>\n <b>New</b>: DFAdmin to create an account on ADRF. DFAdmin will change the status to active after account creation.\n <br/>\n <b>Active</b>: Users that can access the system. <br/>\n <b>Locked</b>: The user account is locked and will not be able to authenticate. \n To unlock the user, set the status to unlocked.<br/>\n <b>Locked by too many failed attempts</b>: This is an automatic status and the admin should not use it.\n After the defined time, the user will return automatically to active. <br/>\n <b>Locked by inactivity</b>: This is an automatic status, the admin should not use this status. \n To unlock the user, set the status to unlocked.<br/>\n <b>Unlocked</b>: Admins should use this status to return a user to active. \n This can be used (1) before the automatic time, when they\'re locked by too many failed attempts; \n or (2) when the user is locked by inactivity.<br/>\n <b>Disabled</b>: This status should be used instead of removing a user. <br/>\n <a target="_blank" \n href=\'https://github.com/NYU-Chicago-data-facility/dfadmin/blob/master/documentation/State%20machine%20Diagram%20-%20User%20Status.jpg\'>\n State machine Diagram - User Status (DFAdmin GitHub)\n </a>\n ', max_length=256)),
('signed_terms_at', models.DateField(blank=True, help_text=b'Indicates when the user signed the Data Facility Terms of Use', null=True)),
('ldap_last_auth_time', models.DateTimeField(blank=True, editable=False, null=True, verbose_name=b'Last Access')),
('ldap_lock_time', models.DateTimeField(blank=True, editable=False, null=True)),
('ldap_last_pwd_change', models.DateTimeField(blank=True, editable=False, null=True, verbose_name=b'Last Password Change')),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('system_user', models.BooleanField(default=False, help_text=b'Is it a system user?')),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical user',
},
),
migrations.CreateModel(
name='HistoricalUserDfRole',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('begin', models.DateTimeField()),
('end', models.DateTimeField(blank=True, null=True)),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical user df role',
},
),
migrations.CreateModel(
name='HistoricalUserTraining',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('date', models.DateField(blank=True, help_text=b'When this training was taken, for trainings without a specific date.', null=True)),
('created_at', models.DateTimeField(blank=True, editable=False)),
('updated_at', models.DateTimeField(blank=True, editable=False)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical user training',
},
),
migrations.CreateModel(
name='LdapObject',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ldap_id', models.IntegerField(blank=True, editable=False, help_text=b"This is internal LDAP information. Don't change this value unless you know what you're doing.", null=True, unique=True)),
('ldap_name', models.CharField(blank=True, max_length=256, null=True, unique=True, validators=[django.core.validators.RegexValidator(regex=b'^[a-z_-]{5,}$')])),
],
),
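# LdapObject is a concrete parent model: Dataset, DfRole, Project and User below
# inherit from it via Django multi-table inheritance (each declares a
# ldapobject_ptr OneToOneField with parent_link=True and
# bases=('data_facility_admin.ldapobject',)).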
migrations.CreateModel(
name='ProfileTag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.CharField(max_length=256, unique=True)),
('description', models.CharField(blank=True, max_length=256)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['text'],
},
),
migrations.CreateModel(
name='ProjectMember',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('request_id', models.IntegerField(blank=True, default=None, help_text=b'Id from the ticketing system (if not the same as the project creation ticket), so it is possible to trace back if more info is needed.', null=True)),
('start_date', models.DateTimeField(blank=True, null=True)),
('end_date', models.DateTimeField(blank=True, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['project', 'member'],
},
),
migrations.CreateModel(
name='ProjectRole',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(help_text=b'This is a friendly name, such as Student (Reader) or Instructor (Admin).', max_length=256, unique=True)),
('description', models.TextField(blank=True, max_length=256)),
('system_role', models.CharField(choices=[(b'Reader', b'Reader'), (b'Writer', b'Writer'), (b'Admin', b'Admin')], default=b'Admin', max_length=10)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['name'],
},
),
migrations.CreateModel(
name='ProjectTool',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('additional_info', models.CharField(blank=True, help_text=b'Additional info, such as a database name, in case it does not follow the Data Facility convention.', max_length=256)),
('request_id', models.IntegerField(blank=True, default=None, help_text=b'Id from the ticketing system (if not the same as the project creation ticket), so it is possible to trace back if more info is needed.', null=True)),
('tool_name', models.CharField(choices=[(b'Git Lab', b'Git Lab'), (b'Postgres', b'Postgres'), (b'Oracle', b'Oracle'), (b'POSIX', b'POSIX'), (b'Other', b'Other')], default=b'Postgres', max_length=10)),
('other_name', models.CharField(blank=True, help_text=b'Specify the tool name if Other is selected.', max_length=256)),
('status', models.CharField(choices=[(b'Active', b'Active'), (b'Deactivated', b'Deactivated')], default=b'Active', max_length=256)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['tool_name', 'other_name'],
},
),
migrations.CreateModel(
name='SignedTermsOfUse',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('signed_at', models.DateField()),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['user', 'terms_of_use', 'signed_at'],
},
),
migrations.CreateModel(
name='SystemInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('last_export', models.DateTimeField(blank=True, editable=False, null=True)),
('last_import', models.DateTimeField(blank=True, editable=False, null=True)),
],
),
migrations.CreateModel(
name='TermsOfUse',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.CharField(max_length=256, unique=True)),
('version', models.IntegerField(blank=True)),
('release_date', models.DateField(auto_now_add=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['version'],
},
),
migrations.CreateModel(
name='Training',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=256)),
('url', models.URLField(blank=True, help_text=b'Training website, if any.', max_length=256, null=True)),
('description', models.TextField(blank=True, max_length=256, null=True)),
('date', models.DateTimeField(blank=True, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['name', 'date'],
},
),
migrations.CreateModel(
name='UserDfRole',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('begin', models.DateTimeField()),
('end', models.DateTimeField(blank=True, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['user', 'role'],
},
),
migrations.CreateModel(
name='UserTraining',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField(blank=True, help_text=b'When this training was taken, for trainings without a specific date.', null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('training', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_facility_admin.Training')),
],
options={
'ordering': ['date', 'user', 'training'],
},
),
migrations.CreateModel(
name='Dataset',
fields=[
('ldapobject_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='data_facility_admin.LdapObject')),
('public', models.BooleanField(default=False, help_text=b'Check this if everyone should have access to this dataset.')),
('doi', models.CharField(max_length=256, unique=True)),
('name', models.CharField(max_length=256)),
('version', models.CharField(blank=True, default=b'1', max_length=256, null=True)),
('storage_location', models.CharField(blank=True, help_text=b'Location of this dataset (Optional if dataset is green).', max_length=256, null=True)),
('vault_volume', models.CharField(blank=True, help_text=b'Volume on DF Vault. For non green datasets only. This information is internal.', max_length=256, null=True)),
('needs_review', models.BooleanField(default=False)),
('shareable', models.BooleanField(default=True, help_text=b'Indicates if this dataset can be shared with other users.')),
('last_reported_at', models.DateField(blank=True, null=True)),
('expiration', models.DateField(blank=True, null=True)),
('data_classification', models.CharField(choices=[(b'Green', b'Green'), (b'Restricted Green', b'Restricted Green'), (b'Yellow', b'Yellow'), (b'Red', b'Red')], default=b'Green', max_length=256)),
('report_frequency', models.CharField(choices=[(b'No Reporting Needed', b'No Reporting Needed'), (b'Quarterly', b'Quarterly'), (b'Biannual', b'Biannual'), (b'Annual', b'Annual')], default=b'No Reporting Needed', max_length=256)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('data_provider', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='data_facility_admin.DataProvider')),
],
options={
'ordering': ['doi', 'name'],
},
bases=('data_facility_admin.ldapobject',),
),
migrations.CreateModel(
name='DfRole',
fields=[
('ldapobject_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='data_facility_admin.LdapObject')),
('name', models.CharField(max_length=256, unique=True)),
('description', models.CharField(blank=True, max_length=256, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['name'],
'verbose_name': 'Data Facility Role',
'verbose_name_plural': 'Data Facility Roles',
},
bases=('data_facility_admin.ldapobject',),
),
migrations.CreateModel(
name='Project',
fields=[
('ldapobject_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='data_facility_admin.LdapObject')),
('has_irb', models.BooleanField(default=False)),
('name', models.CharField(max_length=256)),
('abstract', models.TextField(max_length=256)),
('methodology', models.TextField(blank=True, max_length=256)),
('expected_outcomes', models.TextField(blank=True, max_length=256)),
('status', models.CharField(choices=[(b'Pending Approval', b'Pending Approval'), (b'Active', b'Active'), (b'Archived', b'Archived')], default=b'Pending Approval', max_length=20)),
('type', models.CharField(choices=[(b'Capstone', b'Capstone'), (b'Class', b'Class'), (b'Research', b'Research')], default=b'Research', max_length=10)),
('environment', models.CharField(blank=True, choices=[(b'Green', b'Green'), (b'Yellow', b'Yellow'), (b'Red', b'Red')], default=b'Green', max_length=10)),
('request_id', models.IntegerField(blank=True, default=None, help_text=b'Id from the ticketing system, so it is possible to trace back if more info is needed.', null=True)),
('workspace_path', models.CharField(blank=True, max_length=256)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('instructors', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='data_facility_admin.DfRole')),
],
options={
'ordering': ['name'],
},
bases=('data_facility_admin.ldapobject',),
),
migrations.CreateModel(
name='User',
fields=[
('ldapobject_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='data_facility_admin.LdapObject')),
('first_name', models.CharField(max_length=256)),
('last_name', models.CharField(max_length=256)),
('orc_id', models.CharField(blank=True, max_length=256)),
('affiliation', models.CharField(blank=True, max_length=256)),
('email', models.EmailField(max_length=254, unique=True)),
('job_title', models.CharField(blank=True, max_length=256)),
('sponsor', models.CharField(blank=True, max_length=256)),
('status', models.CharField(choices=[(b'Pending Approval', b'Pending Approval'), (b'New', b'New'), (b'Active', b'Active'), (b'Locked', b'Locked'), (b'Locked by too many failed auth attempts', b'Locked by too many failed auth attempts (AUTO)'), (b'Locked by inactivity', b'Locked by inactivity (AUTO)'), (b'Unlocked', b'Unlocked'), (b'Disabled', b'Disabled')], default=b'New', help_text=b'\n <b>Pending Approval</b>: Accounts on this status will not be created on the system. <br/>\n <b>New</b>: DFAdmin to create an account on ADRF. DFAdmin will change the status to active after account creation.\n <br/>\n <b>Active</b>: Users that can access the system. <br/>\n <b>Locked</b>: The user account is locked and will not be able to authenticate. \n To unlock the user, set the status to unlocked.<br/>\n <b>Locked by too many failed attempts</b>: This is an automatic status and the admin should not use it.\n After the defined time, the user will return automatically to active. <br/>\n <b>Locked by inactivity</b>: This is an automatic status, the admin should not use this status. \n To unlock the user, set the status to unlocked.<br/>\n <b>Unlocked</b>: Admins should use this status to return a user to active. \n This can be used (1) before the automatic time, when they\'re locked by too many failed attempts; \n or (2) when the user is locked by inactivity.<br/>\n <b>Disabled</b>: This status should be used instead of removing a user. <br/>\n <a target="_blank" \n href=\'https://github.com/NYU-Chicago-data-facility/dfadmin/blob/master/documentation/State%20machine%20Diagram%20-%20User%20Status.jpg\'>\n State machine Diagram - User Status (DFAdmin GitHub)\n </a>\n ', max_length=256)),
('signed_terms_at', models.DateField(blank=True, help_text=b'Indicates when the user signed the Data Facility Terms of Use', null=True)),
('ldap_last_auth_time', models.DateTimeField(blank=True, editable=False, null=True, verbose_name=b'Last Access')),
('ldap_lock_time', models.DateTimeField(blank=True, editable=False, null=True)),
('ldap_last_pwd_change', models.DateTimeField(blank=True, editable=False, null=True, verbose_name=b'Last Password Change')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('system_user', models.BooleanField(default=False, help_text=b'Is it a system user?')),
('tags', models.ManyToManyField(blank=True, to='data_facility_admin.ProfileTag')),
],
options={
'ordering': ['last_name', 'first_name'],
},
bases=('data_facility_admin.ldapobject',),
),
migrations.AddField(
model_name='signedtermsofuse',
name='terms_of_use',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='data_facility_admin.TermsOfUse'),
),
migrations.AddField(
model_name='projectmember',
name='role',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_facility_admin.ProjectRole'),
),
migrations.AddField(
model_name='historicalusertraining',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicalusertraining',
name='training',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.Training'),
),
migrations.AddField(
model_name='historicaluserdfrole',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicaluser',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicaluser',
name='ldapobject_ptr',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.LdapObject'),
),
migrations.AddField(
model_name='historicaltraining',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicaltermsofuse',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicalsignedtermsofuse',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicalsignedtermsofuse',
name='terms_of_use',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.TermsOfUse'),
),
migrations.AddField(
model_name='historicalprojecttool',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicalprojectrole',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicalprojectmember',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicalprojectmember',
name='role',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.ProjectRole'),
),
migrations.AddField(
model_name='historicalproject',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicalproject',
name='ldapobject_ptr',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.LdapObject'),
),
migrations.AddField(
model_name='historicalproject',
name='parent_project',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.HistoricalProject'),
),
migrations.AddField(
model_name='historicalprofiletag',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicaldfrole',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicaldfrole',
name='ldapobject_ptr',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.LdapObject'),
),
migrations.AddField(
model_name='historicaldatasteward',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicaldatasetaccess',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicaldataset',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicaldataset',
name='ldapobject_ptr',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.LdapObject'),
),
migrations.AddField(
model_name='historicaldataagreementsignature',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicaldataagreement',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='usertraining',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_facility_admin.User'),
),
migrations.AddField(
model_name='userdfrole',
name='role',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='data_facility_admin.DfRole'),
),
migrations.AddField(
model_name='userdfrole',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_facility_admin.User'),
),
migrations.AddField(
model_name='signedtermsofuse',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='data_facility_admin.User'),
),
migrations.AddField(
model_name='projecttool',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_facility_admin.Project'),
),
migrations.AddField(
model_name='projectmember',
name='member',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_facility_admin.User'),
),
migrations.AddField(
model_name='projectmember',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_facility_admin.Project'),
),
migrations.AddField(
model_name='project',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='data_facility_admin.User'),
),
migrations.AddField(
model_name='project',
name='parent_project',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='data_facility_admin.Project'),
),
migrations.AddField(
model_name='historicalusertraining',
name='user',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.User'),
),
migrations.AddField(
model_name='historicaluserdfrole',
name='role',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.DfRole'),
),
migrations.AddField(
model_name='historicaluserdfrole',
name='user',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.User'),
),
migrations.AddField(
model_name='historicalsignedtermsofuse',
name='user',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.User'),
),
migrations.AddField(
model_name='historicalprojecttool',
name='project',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.Project'),
),
migrations.AddField(
model_name='historicalprojectmember',
name='member',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.User'),
),
migrations.AddField(
model_name='historicalprojectmember',
name='project',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.Project'),
),
migrations.AddField(
model_name='historicalproject',
name='instructors',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.DfRole'),
),
migrations.AddField(
model_name='historicalproject',
name='owner',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.User'),
),
migrations.AddField(
model_name='historicaldatasteward',
name='dataset',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.Dataset'),
),
migrations.AddField(
model_name='historicaldatasteward',
name='user',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.User'),
),
migrations.AddField(
model_name='historicaldatasetaccess',
name='dataset',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.Dataset'),
),
migrations.AddField(
model_name='historicaldatasetaccess',
name='project',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.Project'),
),
migrations.AddField(
model_name='historicaldataagreementsignature',
name='user',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.User'),
),
migrations.AddField(
model_name='historicaldataagreement',
name='dataset',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='data_facility_admin.Dataset'),
),
migrations.AddField(
model_name='datasteward',
name='dataset',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_facility_admin.Dataset'),
),
migrations.AddField(
model_name='datasteward',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='data_facility_admin.User'),
),
migrations.AddField(
model_name='datasetaccess',
name='dataset',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='data_facility_admin.Dataset'),
),
migrations.AddField(
model_name='datasetaccess',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='data_facility_admin.Project'),
),
migrations.AddField(
model_name='dataagreementsignature',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_facility_admin.User'),
),
migrations.AddField(
model_name='dataagreement',
name='dataset',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='data_facility_admin.Dataset'),
),
migrations.AlterUniqueTogether(
name='userdfrole',
unique_together=set([('user', 'role')]),
),
migrations.AlterUniqueTogether(
name='dataagreementsignature',
unique_together=set([('user', 'data_agreement')]),
),
    ]
| 65.537388 | 1,793 | 0.60614 | 7,137 | 65,734 | 5.415721 | 0.053244 | 0.044008 | 0.027942 | 0.038704 | 0.940003 | 0.933173 | 0.9095 | 0.902644 | 0.89584 | 0.879799 | 0 | 0.008284 | 0.247072 | 65,734 | 1,002 | 1,794 | 65.602794 | 0.772675 | 0.000989 | 0 | 0.831992 | 1 | 0.016097 | 0.247495 | 0.041559 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.002012 | 0.006036 | 0 | 0.01006 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
a2b26f568cb030a89c20ca76ea1ebacaada42f01 | 141,206 | py | Python | security-admin/scripts/db_setup.py | lw-lin/incubator-ranger | 2fb907c802ca64d0117261ef7f47ab2912578d18 | [
"Apache-2.0"
] | null | null | null | security-admin/scripts/db_setup.py | lw-lin/incubator-ranger | 2fb907c802ca64d0117261ef7f47ab2912578d18 | [
"Apache-2.0"
] | null | null | null | security-admin/scripts/db_setup.py | lw-lin/incubator-ranger | 2fb907c802ca64d0117261ef7f47ab2912578d18 | [
"Apache-2.0"
] | 2 | 2020-04-19T09:24:15.000Z | 2021-09-01T14:44:06.000Z | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
#
import os
import re
import sys
import errno
import shlex
import platform
import logging
import subprocess
import fileinput
from os.path import basename
from subprocess import Popen,PIPE
from datetime import date
import time
import datetime
from time import gmtime, strftime
globalDict = {}
os_name = platform.system()
os_name = os_name.upper()
jisql_debug=True
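# When RANGER_ADMIN_HOME is unset on Linux, the script assumes it is launched
# from the Ranger admin installation root (os.getcwd()), since
# install.properties and the jisql libraries are resolved relative to it.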
if os_name == "LINUX":
RANGER_ADMIN_HOME = os.getenv("RANGER_ADMIN_HOME")
if RANGER_ADMIN_HOME is None:
RANGER_ADMIN_HOME = os.getcwd()
elif os_name == "WINDOWS":
RANGER_ADMIN_HOME = os.getenv("RANGER_ADMIN_HOME")
def check_output(query):
if os_name == "LINUX":
p = subprocess.Popen(shlex.split(query), stdout=subprocess.PIPE)
elif os_name == "WINDOWS":
p = subprocess.Popen(query, stdout=subprocess.PIPE, shell=True)
output = p.communicate ()[0]
return output
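# Illustrative example: on Linux, check_output("echo hello") tokenizes the
# command with shlex.split() and returns the process stdout ("hello\n" as
# str on Python 2, bytes on Python 3).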


def log(msg, type):
    if type == 'info':
        logging.info(" %s", msg)
    if type == 'debug':
        logging.debug(" %s", msg)
    if type == 'warning':
        logging.warning(" %s", msg)
    if type == 'exception':
        logging.exception(" %s", msg)
    if type == 'error':
        logging.error(" %s", msg)


def populate_global_dict():
    global globalDict
    if os_name == "LINUX":
        read_config_file = open(os.path.join(RANGER_ADMIN_HOME, 'install.properties'))
    elif os_name == "WINDOWS":
        read_config_file = open(os.path.join(RANGER_ADMIN_HOME, 'bin', 'install_config.properties'))
    library_path = os.path.join(RANGER_ADMIN_HOME, "cred", "lib", "*")
    for each_line in read_config_file.read().split('\n'):
        if len(each_line) == 0:
            continue
        if re.search('=', each_line):
            key, value = each_line.strip().split("=", 1)
            key = key.strip()
            if 'PASSWORD' in key:
                jceks_file_path = os.path.join(RANGER_ADMIN_HOME, 'jceks', 'ranger_db.jceks')
                #statuscode,value = call_keystore(library_path,key,'',jceks_file_path,'get')
                #if statuscode == 1:
                value = ''
            value = value.strip()
            globalDict[key] = value
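# Example (illustrative): a properties line "db_name=ranger" yields
# globalDict['db_name'] = 'ranger'. Any key containing PASSWORD is stored as ''
# because the credential-keystore lookup above is commented out.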


def jisql_log(query, db_password):
    if jisql_debug == True:
        if os_name == "WINDOWS":
            query = query.replace(' -p "' + db_password + '"', ' -p "********"')
            log("[JISQL] " + query, "info")
        else:
            query = query.replace(" -p '" + db_password + "'", " -p '********'")
            log("[JISQL] " + query, "info")


def password_validation(password):
    if password:
        if re.search("[\\\`'\"]", password):
            log("[E] password contains one of the unsupported special characters like \" ' \ `", "error")
            sys.exit(1)
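# Example: password_validation("p@ss`word") logs an error and exits, since a
# backquote (like a quote or backslash) would break the quoted -p argument in
# the generated jisql command line.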


class BaseDB(object):

    def check_connection(self, db_name, db_user, db_password):
        log("[I] ---------- Verifying DB connection ----------", "info")

    def check_table(self, db_name, db_user, db_password, TABLE_NAME):
        log("[I] ---------- Verifying table ----------", "info")

    def import_db_file(self, db_name, db_user, db_password, file_name):
        log("[I] ---------- Importing db schema ----------", "info")

    def upgrade_db(self, db_name, db_user, db_password, DBVERSION_CATALOG_CREATION):
        self.import_db_file(db_name, db_user, db_password, DBVERSION_CATALOG_CREATION)
        log("[I] Baseline DB upgraded successfully", "info")

    def apply_patches(self, db_name, db_user, db_password, PATCHES_PATH):
        # first get all patches and then apply each patch
        if not os.path.exists(PATCHES_PATH):
            log("[I] No patches to apply!", "info")
        else:
            # files: coming from os.listdir() sorted alphabetically, thus not
            # numerically (e.g. '10-x.sql' sorts before '2-y.sql' unless the
            # version prefixes are zero-padded)
            files = os.listdir(PATCHES_PATH)
            if files:
                sorted_files = sorted(files, key=lambda x: str(x.split('.')[0]))
                for filename in sorted_files:
                    currentPatch = os.path.join(PATCHES_PATH, filename)
                    self.import_db_patches(db_name, db_user, db_password, currentPatch)
            else:
                log("[I] No patches to apply!", "info")

    def auditdb_operation(self, xa_db_host, audit_db_host, db_name, audit_db_name, db_user, audit_db_user, db_password, audit_db_password, file_name, TABLE_NAME):
        log("[I] ----------------- Audit DB operations ------------", "info")

    def apply_auditdb_patches(self, xa_sqlObj, xa_db_host, audit_db_host, db_name, audit_db_name, db_user, audit_db_user, db_password, audit_db_password, PATCHES_PATH, TABLE_NAME):
        # first get all patches and then apply each patch
        if not os.path.exists(PATCHES_PATH):
            log("[I] No patches to apply!", "info")
        else:
            # files: coming from os.listdir() sorted alphabetically, thus not numerically
            files = os.listdir(PATCHES_PATH)
            if files:
                sorted_files = sorted(files, key=lambda x: str(x.split('.')[0]))
                for filename in sorted_files:
                    currentPatch = os.path.join(PATCHES_PATH, filename)
                    self.import_auditdb_patches(xa_sqlObj, xa_db_host, audit_db_host, db_name, audit_db_name, db_user, audit_db_user, db_password, audit_db_password, currentPatch, TABLE_NAME)
            else:
                log("[I] No patches to apply!", "info")

    def execute_java_patches(self, xa_db_host, db_user, db_password, db_name):
        log("[I] ----------------- Executing java patches ------------", "info")

    def create_synonym(self, db_name, db_user, db_password, audit_db_user):
        log("[I] ----------------- Creating Synonym ------------", "info")

    def change_admin_default_password(self, xa_db_host, db_user, db_password, db_name, userName, oldPassword, newPassword):
        log("[I] ----------------- Changing Ranger admin default password ------------", "info")


class MysqlConf(BaseDB):
    # Constructor
    def __init__(self, host, SQL_CONNECTOR_JAR, JAVA_BIN):
        self.host = host
        self.SQL_CONNECTOR_JAR = SQL_CONNECTOR_JAR
        self.JAVA_BIN = JAVA_BIN

    def get_jisql_cmd(self, user, password, db_name):
        path = RANGER_ADMIN_HOME
        self.JAVA_BIN = self.JAVA_BIN.strip("'")
        if os_name == "LINUX":
            jisql_cmd = "%s -cp %s:%s/jisql/lib/* org.apache.util.sql.Jisql -driver mysqlconj -cstring jdbc:mysql://%s/%s -u '%s' -p '%s' -noheader -trim -c \;" % (self.JAVA_BIN, self.SQL_CONNECTOR_JAR, path, self.host, db_name, user, password)
        elif os_name == "WINDOWS":
            jisql_cmd = "%s -cp %s;%s\\jisql\\lib\\* org.apache.util.sql.Jisql -driver mysqlconj -cstring jdbc:mysql://%s/%s -u \"%s\" -p \"%s\" -noheader -trim" % (self.JAVA_BIN, self.SQL_CONNECTOR_JAR, path, self.host, db_name, user, password)
        return jisql_cmd
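# Illustrative rendering with hypothetical values (host 'localhost', db
# 'ranger', user 'rangeradmin', password 'secret') on Linux:
#   java -cp mysql-connector-java.jar:<RANGER_ADMIN_HOME>/jisql/lib/* \
#        org.apache.util.sql.Jisql -driver mysqlconj \
#        -cstring jdbc:mysql://localhost/ranger -u 'rangeradmin' -p 'secret' \
#        -noheader -trim -c \;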
def check_connection(self, db_name, db_user, db_password):
log("[I] Checking connection..", "info")
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"SELECT version();\""
elif os_name == "WINDOWS":
query = get_cmd + " -query \"SELECT version();\" -c ;"
jisql_log(query, db_password)
output = check_output(query)
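# Note: output.strip('Production |') strips those characters from the ends of
# the output; it is used as a "query returned something" truthiness check,
# not a substring match. This idiom recurs throughout the file.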
if output.strip('Production |'):
log("[I] Checking connection passed.", "info")
return True
else:
log("[E] Can't establish connection!! Exiting.." ,"error")
log("[I] Please run DB setup first or contact Administrator.." ,"info")
sys.exit(1)
def grant_audit_db_user(self, db_user, audit_db_name, audit_db_user, audit_db_password, db_password,TABLE_NAME):
hosts_arr =["%", "localhost"]
hosts_arr.append(self.host)
for host in hosts_arr:
log("[I] ---------------Granting privileges TO '"+ audit_db_user + "' on '" + audit_db_name+"'-------------" , "info")
get_cmd = self.get_jisql_cmd(db_user, db_password, audit_db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"GRANT INSERT ON %s.%s TO '%s'@'%s';\"" %(audit_db_name,TABLE_NAME,audit_db_user,host)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"GRANT INSERT ON %s.%s TO '%s'@'%s';\" -c ;" %(audit_db_name,TABLE_NAME,audit_db_user,host)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] Granting privileges to '" + audit_db_user+"' done on '"+ audit_db_name+"'", "info")
else:
log("[E] Granting privileges to '" +audit_db_user+"' failed on '" + audit_db_name+"'", "error")
sys.exit(1)
def import_db_file(self, db_name, db_user, db_password, file_name):
name = basename(file_name)
if os.path.isfile(file_name):
log("[I] Importing db schema to database " + db_name + " from file: " + name,"info")
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -input %s -c ;" %file_name
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] "+name + " DB schema imported successfully","info")
else:
log("[E] "+name + " DB schema import failed!","error")
sys.exit(1)
else:
log("[E] DB schema file " + name+ " not found","error")
sys.exit(1)
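# Patch bookkeeping protocol used below: a row in x_db_version_h with
# active='N' acts as an in-progress marker (a crude lock). Each patch run
# first checks for an active='Y' row (already applied), then for another
# process's active='N' row (polling every 300s until it clears), and
# otherwise inserts its own 'N' row, runs the SQL file, and flips the row to
# active='Y' on success or deletes it on failure.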
def import_db_patches(self, db_name, db_user, db_password, file_name):
name = basename(file_name)
if os.path.isfile(file_name):
version = name.split('-')[0]
log("[I] Executing patch on " + db_name + " from file: " + name,"info")
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Patch "+ name +" is already applied" ,"info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Patch "+ name +" is being applied by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', now(), user(), now(), user(),'N') ;\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', now(), user(), now(), user(),'N') ;\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Patch "+ name +" is being applied..","info")
else:
log("[E] Patch "+ name +" failed", "error")
sys.exit(1)
if os_name == "LINUX":
query = get_cmd + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -input %s -c ;" %file_name
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] "+name + " patch applied","info")
if os_name == "LINUX":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] Patch version updated", "info")
else:
log("[E] Updating patch version failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] "+name + " import failed!","error")
sys.exit(1)
def import_auditdb_patches(self, xa_sqlObj,xa_db_host, audit_db_host, db_name, audit_db_name, db_user, audit_db_user, db_password, audit_db_password, file_name, TABLE_NAME):
log("[I] --------- Checking XA_ACCESS_AUDIT table to apply audit db patches --------- ","info")
output = self.check_table(audit_db_name, db_user, db_password, TABLE_NAME)
if output == True:
name = basename(file_name)
if os.path.isfile(file_name):
version = name.split('-')[0]
log("[I] Executing patch on " + audit_db_name + " from file: " + name,"info")
get_cmd1 = xa_sqlObj.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd1 + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Patch "+ name +" is already applied" ,"info")
else:
if os_name == "LINUX":
query = get_cmd1 + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Patch "+ name +" is being applied by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd1 + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', now(), user(), now(), user(),'N') ;\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', now(), user(), now(), user(),'N') ;\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Patch "+ name +" is being applied..","info")
else:
log("[E] Patch "+ name +" failed", "error")
sys.exit(1)
get_cmd2 = self.get_jisql_cmd(db_user, db_password, audit_db_name)
if os_name == "LINUX":
query = get_cmd2 + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd2 + " -input %s -c ;" %file_name
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] "+name + " patch applied","info")
if os_name == "LINUX":
query = get_cmd1 + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] Patch version updated", "info")
else:
log("[E] Updating patch version failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd1 + " -query \"delete from x_db_version_h where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] "+name + " import failed!","error")
sys.exit(1)
else:
log("[I] Table XA_ACCESS_AUDIT does not exists in " +audit_db_name,"error")
sys.exit(1)
def check_table(self, db_name, db_user, db_password, TABLE_NAME):
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"show tables like '%s';\"" %(TABLE_NAME)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"show tables like '%s';\" -c ;" %(TABLE_NAME)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(TABLE_NAME + " |"):
log("[I] Table " + TABLE_NAME +" already exists in database '" + db_name + "'","info")
return True
else:
log("[I] Table " + TABLE_NAME +" does not exist in database " + db_name + "","info")
return False
def auditdb_operation(self, xa_db_host, audit_db_host, db_name, audit_db_name, db_user, audit_db_user, db_password, audit_db_password, file_name, TABLE_NAME):
log("[I] --------- Check ranger user connection ---------","info")
self.check_connection(audit_db_name, db_user, db_password)
log("[I] --------- Check audit table exists --------- ","info")
output = self.check_table(audit_db_name, db_user, db_password, TABLE_NAME)
if output == False:
self.import_db_file(audit_db_name ,db_user, db_password, file_name)
self.grant_audit_db_user(db_user, audit_db_name, audit_db_user, audit_db_password, db_password,TABLE_NAME)
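# Java patches are class files named like Patch<Name>_J<version>.class; the
# regexes below split the class-name stem from the J<version> tag, and the
# patches are applied in ascending numeric order of <version>.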
def execute_java_patches(self, xa_db_host, db_user, db_password, db_name):
my_dict = {}
version = ""
className = ""
app_home = os.path.join(RANGER_ADMIN_HOME,"ews","webapp")
ranger_log = os.path.join(RANGER_ADMIN_HOME,"ews","logs")
javaFiles = os.path.join(app_home,"WEB-INF","classes","org","apache","ranger","patch")
if not os.path.exists(javaFiles):
log("[I] No java patches to apply!","info")
else:
files = os.listdir(javaFiles)
if files:
for filename in files:
f = re.match("^Patch.*?.class$",filename)
if f:
className = re.match("(Patch.*?)_.*.class",filename)
className = className.group(1)
version = re.match("Patch.*?_(.*).class",filename)
version = version.group(1)
key3 = int(version.strip("J"))
my_dict[key3] = filename
keylist = my_dict.keys()
keylist.sort()
for key in keylist:
#print "%s: %s" % (key, my_dict[key])
version = str(key)
className = my_dict[key]
className = className.strip(".class")
if version != "":
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Java patch "+ className +" is already applied" ,"info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'N';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Java patch "+ className +" is being applied by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('J%s', now(), user(), now(), user(),'N') ;\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('J%s', now(), user(), now(), user(),'N') ;\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] java patch "+ className +" is being applied..","info")
else:
log("[E] java patch "+ className +" failed", "error")
sys.exit(1)
if os_name == "LINUX":
path = os.path.join("%s","WEB-INF","classes","conf:%s","WEB-INF","classes","lib","*:%s","WEB-INF",":%s","META-INF",":%s","WEB-INF","lib","*:%s","WEB-INF","classes",":%s","WEB-INF","classes","META-INF:%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
elif os_name == "WINDOWS":
path = os.path.join("%s","WEB-INF","classes","conf;%s","WEB-INF","classes","lib","*;%s","WEB-INF",";%s","META-INF",";%s","WEB-INF","lib","*;%s","WEB-INF","classes",";%s","WEB-INF","classes","META-INF;%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
get_java_cmd = "%s -Dlogdir=%s -Dlog4j.configuration=db_patch.log4j.xml -cp %s org.apache.ranger.patch.%s"%(self.JAVA_BIN,ranger_log,path,className)
if os_name == "LINUX":
ret = subprocess.call(shlex.split(get_java_cmd))
elif os_name == "WINDOWS":
ret = subprocess.call(get_java_cmd)
if ret == 0:
if os_name == "LINUX":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='J%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='J%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] java patch "+ className +" is applied..","info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] java patch "+ className +" failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] applying java patch "+ className +" failed", "error")
sys.exit(1)
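# The default-admin-password change below reuses the x_db_version_h
# bookkeeping, with the sentinel version string 'DEFAULT_ADMIN_UPDATE' in
# place of a numeric patch version.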
def change_admin_default_password(self, xa_db_host, db_user, db_password, db_name,userName,oldPassword,newPassword):
my_dict = {}
version = ""
className = "ChangePasswordUtil"
version = 'DEFAULT_ADMIN_UPDATE'
app_home = os.path.join(RANGER_ADMIN_HOME,"ews","webapp")
ranger_log = os.path.join(RANGER_ADMIN_HOME,"ews","logs")
filePath = os.path.join(app_home,"WEB-INF","classes","org","apache","ranger","patch","cliutil","ChangePasswordUtil.class")
if os.path.exists(filePath):
if version != "":
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Ranger admin default password has already been changed!!","info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Ranger Password change utility is being executed by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', now(), user(), now(), user(),'N') ;\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', now(), user(), now(), user(),'N') ;\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Ranger admin default password change request is in process..","info")
else:
log("[E] Ranger admin default password change request failed", "error")
sys.exit(1)
if os_name == "LINUX":
path = os.path.join("%s","WEB-INF","classes","conf:%s","WEB-INF","classes","lib","*:%s","WEB-INF",":%s","META-INF",":%s","WEB-INF","lib","*:%s","WEB-INF","classes",":%s","WEB-INF","classes","META-INF:%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
elif os_name == "WINDOWS":
path = os.path.join("%s","WEB-INF","classes","conf;%s","WEB-INF","classes","lib","*;%s","WEB-INF",";%s","META-INF",";%s","WEB-INF","lib","*;%s","WEB-INF","classes",";%s","WEB-INF","classes","META-INF;%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
get_java_cmd = "%s -Dlogdir=%s -Dlog4j.configuration=db_patch.log4j.xml -cp %s org.apache.ranger.patch.cliutil.%s %s %s %s"%(self.JAVA_BIN,ranger_log,path,className,userName,oldPassword,newPassword)
if os_name == "LINUX":
ret = subprocess.call(shlex.split(get_java_cmd))
elif os_name == "WINDOWS":
ret = subprocess.call(get_java_cmd)
if ret == 0:
if os_name == "LINUX":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Ranger admin default password change request processed successfully..","info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] Ranger admin default password change request failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] Ranger admin default password change request failed", "error")
sys.exit(1)
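# OracleConf variations on the same pattern: get_jisql_cmd takes no db_name
# (the schema rides on the connect string), Linux queries pass the terminator
# via a per-invocation "-c \;" flag, SQL files run with "-c /", and new
# x_db_version_h ids come from the X_DB_VERSION_H_SEQ sequence.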
class OracleConf(BaseDB):
# Constructor
def __init__(self, host, SQL_CONNECTOR_JAR, JAVA_BIN):
self.host = host
self.SQL_CONNECTOR_JAR = SQL_CONNECTOR_JAR
self.JAVA_BIN = JAVA_BIN
def get_jisql_cmd(self, user, password):
path = RANGER_ADMIN_HOME
self.JAVA_BIN = self.JAVA_BIN.strip("'")
if not re.search('-Djava.security.egd=file:///dev/urandom', self.JAVA_BIN):
self.JAVA_BIN = self.JAVA_BIN + " -Djava.security.egd=file:///dev/urandom "
#if self.host.count(":") == 2:
if self.host.count(":") == 2 or self.host.count(":") == 0:
#jdbc:oracle:thin:@[HOST][:PORT]:SID or #jdbc:oracle:thin:@GL
cstring="jdbc:oracle:thin:@%s" %(self.host)
else:
#jdbc:oracle:thin:@//[HOST][:PORT]/SERVICE
cstring="jdbc:oracle:thin:@//%s" %(self.host)
if os_name == "LINUX":
jisql_cmd = "%s -cp %s:%s/jisql/lib/* org.apache.util.sql.Jisql -driver oraclethin -cstring %s -u '%s' -p '%s' -noheader -trim" %(self.JAVA_BIN, self.SQL_CONNECTOR_JAR, path, cstring, user, password)
elif os_name == "WINDOWS":
jisql_cmd = "%s -cp %s;%s\jisql\\lib\\* org.apache.util.sql.Jisql -driver oraclethin -cstring %s -u \"%s\" -p \"%s\" -noheader -trim" %(self.JAVA_BIN, self.SQL_CONNECTOR_JAR, path, cstring, user, password)
return jisql_cmd
def check_connection(self, db_name, db_user, db_password):
log("[I] Checking connection", "info")
get_cmd = self.get_jisql_cmd(db_user, db_password)
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"select * from v$version;\""
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select * from v$version;\" -c ;"
jisql_log(query, db_password)
output = check_output(query)
if output.strip('Production |'):
log("[I] Connection success", "info")
return True
else:
log("[E] Can't establish connection!", "error")
sys.exit(1)
def grant_audit_db_user(self, audit_db_name ,db_user,audit_db_user,db_password,audit_db_password):
get_cmd = self.get_jisql_cmd(db_user, db_password)
if os_name == "LINUX":
query = get_cmd + " -c \; -query 'GRANT SELECT ON %s.XA_ACCESS_AUDIT_SEQ TO %s;'" % (db_user,audit_db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"GRANT SELECT ON %s.XA_ACCESS_AUDIT_SEQ TO %s;\" -c ;" % (db_user,audit_db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret != 0:
sys.exit(1)
if os_name == "LINUX":
query = get_cmd + " -c \; -query 'GRANT INSERT ON %s.XA_ACCESS_AUDIT TO %s;'" % (db_user,audit_db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"GRANT INSERT ON %s.XA_ACCESS_AUDIT TO %s;\" -c ;" % (db_user,audit_db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret != 0:
sys.exit(1)
def import_db_file(self, db_name, db_user, db_password, file_name):
name = basename(file_name)
if os.path.isfile(file_name):
log("[I] Importing script " + db_name + " from file: " + name,"info")
get_cmd = self.get_jisql_cmd(db_user, db_password)
if os_name == "LINUX":
query = get_cmd + " -input %s -c \;" %file_name
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -input %s -c ;" %file_name
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] "+name + " imported successfully","info")
else:
log("[E] "+name + " import failed!","error")
sys.exit(1)
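# The synonyms created below let the audit user reference XA_ACCESS_AUDIT and
# XA_ACCESS_AUDIT_SEQ by unqualified name even though both objects live in
# the admin user's schema.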
def create_synonym(self,db_name, db_user, db_password,audit_db_user):
log("[I] ----------------- Creating Synonym ------------", "info")
get_cmd = self.get_jisql_cmd(db_user, db_password)
if os_name == "LINUX":
query = get_cmd + " -c \; -query 'CREATE OR REPLACE SYNONYM %s.XA_ACCESS_AUDIT FOR %s.XA_ACCESS_AUDIT;'" % (audit_db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"CREATE OR REPLACE SYNONYM %s.XA_ACCESS_AUDIT FOR %s.XA_ACCESS_AUDIT;\" -c ;" % (audit_db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret != 0:
sys.exit(1)
if os_name == "LINUX":
query = get_cmd + " -c \; -query 'CREATE OR REPLACE SYNONYM %s.XA_ACCESS_AUDIT_SEQ FOR %s.XA_ACCESS_AUDIT_SEQ;'" % (audit_db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"CREATE OR REPLACE SYNONYM %s.XA_ACCESS_AUDIT_SEQ FOR %s.XA_ACCESS_AUDIT_SEQ;\" -c ;" % (audit_db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret != 0:
sys.exit(1)
def import_db_patches(self, db_name, db_user, db_password, file_name):
if os.path.isfile(file_name):
name = basename(file_name)
version = name.split('-')[0]
log("[I] Executing patch on " + db_name + " from file: " + name,"info")
get_cmd = self.get_jisql_cmd(db_user, db_password)
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version +" |"):
log("[I] Patch "+ name +" is already applied" ,"info")
else:
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"select version from x_db_version_h where version = '%s' and active = 'N';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version +" |"):
while(output.strip(version + " |")):
log("[I] Patch "+ name +" is being applied by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"insert into x_db_version_h (id,version, inst_at, inst_by, updated_at, updated_by,active) values ( X_DB_VERSION_H_SEQ.nextval,'%s', sysdate, '%s', sysdate, '%s','N');\"" %(version, db_user, db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"insert into x_db_version_h (id,version, inst_at, inst_by, updated_at, updated_by,active) values ( X_DB_VERSION_H_SEQ.nextval,'%s', sysdate, '%s', sysdate, '%s','N');\" -c ;" %(version, db_user, db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Patch "+ name +" is being applied..","info")
else:
log("[E] Patch "+ name +" failed", "error")
if os_name == "LINUX":
query = get_cmd + " -input %s -c /" %file_name
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -input %s -c /" %file_name
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] "+name + " patch applied","info")
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] Patch version updated", "info")
else:
log("[E] Updating patch version failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"delete from x_db_version_h where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] "+name + " Import failed!","error")
sys.exit(1)
def import_auditdb_patches(self, xa_sqlObj,xa_db_host, audit_db_host, db_name, audit_db_name, db_user, audit_db_user, db_password, audit_db_password, file_name, TABLE_NAME):
log("[I] --------- Checking XA_ACCESS_AUDIT table to apply audit db patches --------- ","info")
output = self.check_table(db_name, db_user, db_password, TABLE_NAME)
if output == True:
if os.path.isfile(file_name):
name = basename(file_name)
version = name.split('-')[0]
log("[I] Executing patch on " + audit_db_name + " from file: " + name,"info")
get_cmd1 = xa_sqlObj.get_jisql_cmd(db_user, db_password)
if os_name == "LINUX":
query = get_cmd1 + " -c \; -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version +" |"):
log("[I] Patch "+ name +" is already applied" ,"info")
else:
if os_name == "LINUX":
query = get_cmd1 + " -c \; -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version +" |"):
while(output.strip(version + " |")):
log("[I] Patch "+ name +" is being applied by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd1 + " -c \; -query \"insert into x_db_version_h (id,version, inst_at, inst_by, updated_at, updated_by,active) values ( X_DB_VERSION_H_SEQ.nextval,'%s', sysdate, '%s', sysdate, '%s','N');\"" %(version, db_user, db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"insert into x_db_version_h (id,version, inst_at, inst_by, updated_at, updated_by,active) values ( X_DB_VERSION_H_SEQ.nextval,'%s', sysdate, '%s', sysdate, '%s','N');\" -c ;" %(version, db_user, db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Patch "+ name +" is being applied..","info")
else:
log("[E] Patch "+ name +" failed", "error")
get_cmd2 = self.get_jisql_cmd(db_user, db_password)
if os_name == "LINUX":
query = get_cmd2 + " -input %s -c /" %file_name
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd2 + " -input %s -c /" %file_name
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] "+name + " patch applied","info")
if os_name == "LINUX":
query = get_cmd1 + " -c \; -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] Patch version updated", "info")
else:
log("[E] Updating patch version failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd1 + " -c \; -query \"delete from x_db_version_h where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] "+name + " Import failed!","error")
sys.exit(1)
else:
log("[I] Patch file not found","error")
sys.exit(1)
def check_table(self, db_name, db_user, db_password, TABLE_NAME):
get_cmd = self.get_jisql_cmd(db_user ,db_password)
if os_name == "LINUX":
query = get_cmd + " -c \; -query 'select default_tablespace from user_users;'"
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select default_tablespace from user_users;\" -c ;"
jisql_log(query, db_password)
output = check_output(query).strip()
output = output.strip(' |')
db_name = db_name.upper()
if output == db_name:
log("[I] User name " + db_user + " and tablespace " + db_name + " already exists.","info")
log("[I] Verifying table " + TABLE_NAME +" in tablespace " + db_name, "info")
get_cmd = self.get_jisql_cmd(db_user, db_password)
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"select UPPER(table_name) from all_tables where UPPER(tablespace_name)=UPPER('%s') and UPPER(table_name)=UPPER('%s');\"" %(db_name ,TABLE_NAME)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select UPPER(table_name) from all_tables where UPPER(tablespace_name)=UPPER('%s') and UPPER(table_name)=UPPER('%s');\" -c ;" %(db_name ,TABLE_NAME)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(TABLE_NAME.upper() + ' |'):
log("[I] Table " + TABLE_NAME +" already exists in tablespace " + db_name + "","info")
return True
else:
log("[I] Table " + TABLE_NAME +" does not exist in tablespace " + db_name + "","info")
return False
else:
log("[E] "+db_user + " user already assigned to some other tablespace , provide different DB name.","error")
sys.exit(1)
def auditdb_operation(self, xa_db_host , audit_db_host , db_name ,audit_db_name, db_user, audit_db_user, db_password, audit_db_password, file_name, TABLE_NAME):
log("[I] --------- Check admin user connection ---------","info")
self.check_connection(db_name, db_user, db_password)
log("[I] --------- Check audit user connection ---------","info")
self.check_connection(audit_db_name, audit_db_user, audit_db_password)
log("[I] --------- Check table ---------","info")
if self.check_table(db_name, db_user, db_password, TABLE_NAME):
pass
else:
self.import_db_file(audit_db_name, db_user, db_password ,file_name)
log("[I] ---------------Granting privileges TO '"+ audit_db_user + "' on audit table-------------" , "info")
self.grant_audit_db_user( audit_db_name ,db_user, audit_db_user, db_password,audit_db_password)
def execute_java_patches(self, xa_db_host, db_user, db_password, db_name):
my_dict = {}
version = ""
className = ""
app_home = os.path.join(RANGER_ADMIN_HOME,"ews","webapp")
ranger_log = os.path.join(RANGER_ADMIN_HOME,"ews","logs")
javaFiles = os.path.join(app_home,"WEB-INF","classes","org","apache","ranger","patch")
if not os.path.exists(javaFiles):
log("[I] No java patches to apply!","info")
else:
files = os.listdir(javaFiles)
if files:
for filename in files:
f = re.match("^Patch.*?.class$",filename)
if f:
className = re.match("(Patch.*?)_.*.class",filename)
className = className.group(1)
version = re.match("Patch.*?_(.*).class",filename)
version = version.group(1)
key3 = int(version.strip("J"))
my_dict[key3] = filename
keylist = my_dict.keys()
keylist.sort()
for key in keylist:
#print "%s: %s" % (key, my_dict[key])
version = str(key)
className = my_dict[key]
className = className.strip(".class")
if version != "":
get_cmd = self.get_jisql_cmd(db_user, db_password)
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"select version from x_db_version_h where version = 'J%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] java patch "+ className +" is already applied" ,"info")
else:
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"select version from x_db_version_h where version = 'J%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Java patch "+ className +" is being applied by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"insert into x_db_version_h (id,version, inst_at, inst_by, updated_at, updated_by) values ( X_DB_VERSION_H_SEQ.nextval,'J%s', sysdate, '%s', sysdate, '%s');\"" %(version, db_user, db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"insert into x_db_version_h (id,version, inst_at, inst_by, updated_at, updated_by) values ( X_DB_VERSION_H_SEQ.nextval,'J%s', sysdate, '%s', sysdate, '%s');\" -c ;" %(version, db_user, db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] java patch "+ className +" is being applied..","info")
else:
log("[E] java patch "+ className +" failed", "error")
sys.exit(1)
if os_name == "LINUX":
path = os.path.join("%s","WEB-INF","classes","conf:%s","WEB-INF","classes","lib","*:%s","WEB-INF",":%s","META-INF",":%s","WEB-INF","lib","*:%s","WEB-INF","classes",":%s","WEB-INF","classes","META-INF:%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
elif os_name == "WINDOWS":
path = os.path.join("%s","WEB-INF","classes","conf;%s","WEB-INF","classes","lib","*;%s","WEB-INF",";%s","META-INF",";%s","WEB-INF","lib","*;%s","WEB-INF","classes",";%s","WEB-INF","classes","META-INF;%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
get_cmd = "%s -Djava.security.egd=file:///dev/urandom -Dlogdir=%s -Dlog4j.configuration=db_patch.log4j.xml -cp %s org.apache.ranger.patch.%s"%(self.JAVA_BIN,ranger_log,path,className)
if os_name == "LINUX":
ret = subprocess.call(shlex.split(get_cmd))
elif os_name == "WINDOWS":
ret = subprocess.call(get_cmd)
if ret == 0:
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"update x_db_version_h set active='Y' where version='J%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='J%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] java patch "+ className +" is applied..","info")
else:
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"delete from x_db_version_h where version='J%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] java patch "+ className +" failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"delete from x_db_version_h where version='J%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] applying java patch "+ className +" failed", "error")
sys.exit(1)
def change_admin_default_password(self, xa_db_host, db_user, db_password, db_name,userName,oldPassword,newPassword):
my_dict = {}
version = ""
className = "ChangePasswordUtil"
version = 'DEFAULT_ADMIN_UPDATE'
app_home = os.path.join(RANGER_ADMIN_HOME,"ews","webapp")
ranger_log = os.path.join(RANGER_ADMIN_HOME,"ews","logs")
filePath = os.path.join(app_home,"WEB-INF","classes","org","apache","ranger","patch","cliutil","ChangePasswordUtil.class")
if os.path.exists(filePath):
if version != "":
get_cmd = self.get_jisql_cmd(db_user, db_password)
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Ranger admin default password has already been changed!!","info")
else:
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"select version from x_db_version_h where version = '%s' and active = 'N';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Ranger Password change utility is being executed by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"insert into x_db_version_h (id,version, inst_at, inst_by, updated_at, updated_by) values ( X_DB_VERSION_H_SEQ.nextval,'%s', sysdate, '%s', sysdate, '%s');\"" %(version, db_user, db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"insert into x_db_version_h (id,version, inst_at, inst_by, updated_at, updated_by) values ( X_DB_VERSION_H_SEQ.nextval,'%s', sysdate, '%s', sysdate, '%s');\" -c ;" %(version, db_user, db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Ranger admin default password change request is in process..","info")
else:
log("[E] Ranger admin default password change request failed", "error")
sys.exit(1)
if os_name == "LINUX":
path = os.path.join("%s","WEB-INF","classes","conf:%s","WEB-INF","classes","lib","*:%s","WEB-INF",":%s","META-INF",":%s","WEB-INF","lib","*:%s","WEB-INF","classes",":%s","WEB-INF","classes","META-INF:%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
elif os_name == "WINDOWS":
path = os.path.join("%s","WEB-INF","classes","conf;%s","WEB-INF","classes","lib","*;%s","WEB-INF",";%s","META-INF",";%s","WEB-INF","lib","*;%s","WEB-INF","classes",";%s","WEB-INF","classes","META-INF;%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
get_java_cmd = "%s -Dlogdir=%s -Dlog4j.configuration=db_patch.log4j.xml -cp %s org.apache.ranger.patch.cliutil.%s %s %s %s"%(self.JAVA_BIN,ranger_log,path,className,userName,oldPassword,newPassword)
if os_name == "LINUX":
ret = subprocess.call(shlex.split(get_java_cmd))
elif os_name == "WINDOWS":
ret = subprocess.call(get_java_cmd)
if ret == 0:
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Ranger admin default password change request processed successfully..","info")
else:
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"delete from x_db_version_h where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] Ranger admin default password change request failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"delete from x_db_version_h where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] Ranger admin default password change request failed", "error")
sys.exit(1)
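# PostgresConf mirrors MysqlConf with Postgres-specific touches: timestamps
# via current_timestamp, table checks via information_schema.tables, and
# create_language_plpgsql to make sure plpgsql exists before patches run.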
class PostgresConf(BaseDB):
# Constructor
def __init__(self, host, SQL_CONNECTOR_JAR, JAVA_BIN):
self.host = host
self.SQL_CONNECTOR_JAR = SQL_CONNECTOR_JAR
self.JAVA_BIN = JAVA_BIN
def get_jisql_cmd(self, user, password, db_name):
#TODO: User array for forming command
path = RANGER_ADMIN_HOME
self.JAVA_BIN = self.JAVA_BIN.strip("'")
if os_name == "LINUX":
jisql_cmd = "%s -cp %s:%s/jisql/lib/* org.apache.util.sql.Jisql -driver postgresql -cstring jdbc:postgresql://%s/%s -u %s -p '%s' -noheader -trim -c \;" %(self.JAVA_BIN, self.SQL_CONNECTOR_JAR, path, self.host, db_name, user, password)
elif os_name == "WINDOWS":
jisql_cmd = "%s -cp %s;%s\jisql\\lib\\* org.apache.util.sql.Jisql -driver postgresql -cstring jdbc:postgresql://%s/%s -u %s -p \"%s\" -noheader -trim" %(self.JAVA_BIN, self.SQL_CONNECTOR_JAR, path, self.host, db_name, user, password)
return jisql_cmd
def check_connection(self, db_name, db_user, db_password):
log("[I] Checking connection", "info")
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"SELECT 1;\""
elif os_name == "WINDOWS":
query = get_cmd + " -query \"SELECT 1;\" -c ;"
jisql_log(query, db_password)
output = check_output(query)
if output.strip('1 |'):
log("[I] connection success", "info")
return True
else:
log("[E] Can't establish connection", "error")
sys.exit(1)
def import_db_file(self, db_name, db_user, db_password, file_name):
name = basename(file_name)
if os.path.isfile(file_name):
log("[I] Importing db schema to database " + db_name + " from file: " + name,"info")
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -input %s -c ;" %file_name
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] "+name + " DB schema imported successfully","info")
else:
log("[E] "+name + " DB schema import failed!","error")
sys.exit(1)
def grant_audit_db_user(self, audit_db_name , db_user, audit_db_user, db_password, audit_db_password):
log("[I] Granting permission to " + audit_db_user, "info")
get_cmd = self.get_jisql_cmd(db_user, db_password, audit_db_name)
log("[I] Granting select and usage privileges to Postgres audit user '" + audit_db_user + "' on XA_ACCESS_AUDIT_SEQ", "info")
if os_name == "LINUX":
query = get_cmd + " -query 'GRANT SELECT,USAGE ON XA_ACCESS_AUDIT_SEQ TO %s;'" % (audit_db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"GRANT SELECT,USAGE ON XA_ACCESS_AUDIT_SEQ TO %s;\" -c ;" % (audit_db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret != 0:
log("[E] Granting select privileges to Postgres user '" + audit_db_user + "' failed", "error")
sys.exit(1)
log("[I] Granting insert privileges to Postgres audit user '" + audit_db_user + "' on XA_ACCESS_AUDIT table", "info")
if os_name == "LINUX":
query = get_cmd + " -query 'GRANT INSERT ON XA_ACCESS_AUDIT TO %s;'" % (audit_db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"GRANT INSERT ON XA_ACCESS_AUDIT TO %s;\" -c ;" % (audit_db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret != 0:
log("[E] Granting insert privileges to Postgres user '" + audit_db_user + "' failed", "error")
sys.exit(1)
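# plpgsql is ensured up front, presumably because the patch scripts define
# PL/pgSQL functions or triggers; very old Postgres releases did not install
# the language by default.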
def create_language_plpgsql(self,db_user, db_password, db_name):
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"SELECT 1 FROM pg_catalog.pg_language WHERE lanname='plpgsql';\""
elif os_name == "WINDOWS":
query = get_cmd + " -query \"SELECT 1 FROM pg_catalog.pg_language WHERE lanname='plpgsql';\" -c ;"
jisql_log(query, db_password)
output = check_output(query)
if not output.strip('1 |'):
if os_name == "LINUX":
query = get_cmd + " -query \"CREATE LANGUAGE plpgsql;\""
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"CREATE LANGUAGE plpgsql;\" -c ;"
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] LANGUAGE plpgsql created successfully", "info")
else:
log("[E] LANGUAGE plpgsql creation failed", "error")
sys.exit(1)
def import_db_patches(self, db_name, db_user, db_password, file_name):
self.create_language_plpgsql(db_user, db_password, db_name)
name = basename(file_name)
if os.path.isfile(file_name):
version = name.split('-')[0]
log("[I] Executing patch on " + db_name + " from file: " + name,"info")
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Patch "+ name +" is already applied" ,"info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Patch "+ name +" is being applied by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', current_timestamp, '%s', current_timestamp, '%s','N') ;\"" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', current_timestamp, '%s', current_timestamp, '%s','N') ;\" -c ;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Patch "+ name +" is being applied..","info")
else:
log("[E] Patch "+ name +" failed", "error")
if os_name == "LINUX":
query = get_cmd + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -input %s -c ;" %file_name
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] "+name + " patch applied","info")
if os_name == "LINUX":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] Patch version updated", "info")
else:
log("[E] Updating patch version failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] "+name + " import failed!","error")
sys.exit(1)
def import_auditdb_patches(self, xa_sqlObj,xa_db_host, audit_db_host, db_name, audit_db_name, db_user, audit_db_user, db_password, audit_db_password, file_name, TABLE_NAME):
log("[I] --------- Checking XA_ACCESS_AUDIT table to apply audit db patches --------- ","info")
self.create_language_plpgsql(db_user, db_password, audit_db_name)
output = self.check_table(audit_db_name, db_user, db_password, TABLE_NAME)
if output == True:
name = basename(file_name)
if os.path.isfile(file_name):
version = name.split('-')[0]
log("[I] Executing patch on " + audit_db_name + " from file: " + name,"info")
get_cmd1 = xa_sqlObj.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd1 + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Patch "+ name +" is already applied" ,"info")
else:
if os_name == "LINUX":
query = get_cmd1 + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Patch "+ name +" is being applied by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd1 + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', current_timestamp, '%s', current_timestamp, '%s','N') ;\"" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', current_timestamp, '%s', current_timestamp, '%s','N') ;\" -c ;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Patch "+ name +" is being applied..","info")
else:
log("[E] Patch "+ name +" failed", "error")
get_cmd2 = self.get_jisql_cmd(db_user, db_password, audit_db_name)
if os_name == "LINUX":
query = get_cmd2 + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd2 + " -input %s -c ;" %file_name
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] "+name + " patch applied","info")
if os_name == "LINUX":
query = get_cmd1 + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] Patch version updated", "info")
else:
log("[E] Updating patch version failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd1 + " -query \"delete from x_db_version_h where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] "+name + " import failed!","error")
sys.exit(1)
else:
log("[I] Table XA_ACCESS_AUDIT does not exists in " +audit_db_name,"error")
sys.exit(1)
def check_table(self, db_name, db_user, db_password, TABLE_NAME):
log("[I] Verifying table " + TABLE_NAME +" in database " + db_name, "info")
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"select * from (select table_name from information_schema.tables where table_catalog='%s' and table_name = '%s') as temp;\"" %(db_name , TABLE_NAME)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select * from (select table_name from information_schema.tables where table_catalog='%s' and table_name = '%s') as temp;\" -c ;" %(db_name , TABLE_NAME)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(TABLE_NAME +" |"):
log("[I] Table " + TABLE_NAME +" already exists in database " + db_name, "info")
return True
else:
log("[I] Table " + TABLE_NAME +" does not exist in database " + db_name, "info")
return False
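	# Verifies that both the admin and audit users can reach the audit
	# database, creates the audit schema when the audit table is missing,
	# and grants the audit user its privileges.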
def auditdb_operation(self, xa_db_host, audit_db_host, db_name, audit_db_name, db_user, audit_db_user, db_password, audit_db_password, file_name, TABLE_NAME):
log("[I] --------- Check admin user connection ---------","info")
self.check_connection(audit_db_name, db_user, db_password)
log("[I] --------- Check audit user connection ---------","info")
self.check_connection(audit_db_name, audit_db_user, audit_db_password)
log("[I] --------- Check table ---------","info")
output = self.check_table(audit_db_name, audit_db_user, audit_db_password, TABLE_NAME)
if output == False:
self.import_db_file(audit_db_name, db_user, db_password, file_name)
self.grant_audit_db_user(audit_db_name ,db_user, audit_db_user, db_password,audit_db_password)
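	# Java patches are compiled Patch<Name>_J<NNNNN>.class files under
	# WEB-INF/classes/org/apache/ranger/patch; they are tracked in
	# x_db_version_h under 'J'-prefixed versions so SQL and Java patches
	# share the same bookkeeping table, and applied in ascending numeric order.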
def execute_java_patches(self, xa_db_host, db_user, db_password, db_name):
my_dict = {}
version = ""
className = ""
app_home = os.path.join(RANGER_ADMIN_HOME,"ews","webapp")
ranger_log = os.path.join(RANGER_ADMIN_HOME,"ews","logs")
javaFiles = os.path.join(app_home,"WEB-INF","classes","org","apache","ranger","patch")
if not os.path.exists(javaFiles):
log("[I] No java patches to apply!","info")
else:
files = os.listdir(javaFiles)
if files:
for filename in files:
f = re.match("^Patch.*?.class$",filename)
if f:
className = re.match("(Patch.*?)_.*.class",filename)
className = className.group(1)
version = re.match("Patch.*?_(.*).class",filename)
version = version.group(1)
key3 = int(version.strip("J"))
my_dict[key3] = filename
keylist = my_dict.keys()
keylist.sort()
for key in keylist:
#print "%s: %s" % (key, my_dict[key])
version = str(key)
className = my_dict[key]
					className = className[:-len(".class")]  # drop the extension; str.strip(".class") could eat matching edge characters
if version != "":
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Java patch "+ className +" is already applied" ,"info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'N';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Java patch "+ className +" is being applied by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('J%s', current_timestamp, '%s', current_timestamp, '%s','N') ;\"" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('J%s', current_timestamp, '%s', current_timestamp, '%s','N') ;\" -c ;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] java patch "+ className +" is being applied..","info")
else:
log("[E] java patch "+ className +" failed", "error")
sys.exit(1)
if os_name == "LINUX":
path = os.path.join("%s","WEB-INF","classes","conf:%s","WEB-INF","classes","lib","*:%s","WEB-INF",":%s","META-INF",":%s","WEB-INF","lib","*:%s","WEB-INF","classes",":%s","WEB-INF","classes","META-INF:%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
elif os_name == "WINDOWS":
path = os.path.join("%s","WEB-INF","classes","conf;%s","WEB-INF","classes","lib","*;%s","WEB-INF",";%s","META-INF",";%s","WEB-INF","lib","*;%s","WEB-INF","classes",";%s","WEB-INF","classes","META-INF;%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
get_java_cmd = "%s -Dlogdir=%s -Dlog4j.configuration=db_patch.log4j.xml -cp %s org.apache.ranger.patch.%s"%(self.JAVA_BIN,ranger_log,path,className)
if os_name == "LINUX":
ret = subprocess.call(shlex.split(get_java_cmd))
elif os_name == "WINDOWS":
ret = subprocess.call(get_java_cmd)
if ret == 0:
if os_name == "LINUX":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='J%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='J%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] java patch "+ className +" is applied..","info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] java patch "+ className +" failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] applying java patch "+ className +" failed", "error")
sys.exit(1)
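	# Runs the ChangePasswordUtil CLI class to replace the seeded admin
	# password; the sentinel version 'DEFAULT_ADMIN_UPDATE' in x_db_version_h
	# ensures this runs only once per installation.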
def change_admin_default_password(self, xa_db_host, db_user, db_password, db_name,userName,oldPassword,newPassword):
my_dict = {}
version = ""
className = "ChangePasswordUtil"
version = 'DEFAULT_ADMIN_UPDATE'
app_home = os.path.join(RANGER_ADMIN_HOME,"ews","webapp")
ranger_log = os.path.join(RANGER_ADMIN_HOME,"ews","logs")
filePath = os.path.join(app_home,"WEB-INF","classes","org","apache","ranger","patch","cliutil","ChangePasswordUtil.class")
if os.path.exists(filePath):
if version != "":
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Ranger admin default password has already been changed!!","info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Ranger Password change utility is being executed by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', current_timestamp, '%s', current_timestamp, '%s','N') ;\"" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', current_timestamp, '%s', current_timestamp, '%s','N') ;\" -c ;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Ranger admin default password change request is in process..","info")
else:
log("[E] Ranger admin default password change request failed", "error")
sys.exit(1)
if os_name == "LINUX":
path = os.path.join("%s","WEB-INF","classes","conf:%s","WEB-INF","classes","lib","*:%s","WEB-INF",":%s","META-INF",":%s","WEB-INF","lib","*:%s","WEB-INF","classes",":%s","WEB-INF","classes","META-INF:%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
elif os_name == "WINDOWS":
path = os.path.join("%s","WEB-INF","classes","conf;%s","WEB-INF","classes","lib","*;%s","WEB-INF",";%s","META-INF",";%s","WEB-INF","lib","*;%s","WEB-INF","classes",";%s","WEB-INF","classes","META-INF;%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
get_java_cmd = "%s -Dlogdir=%s -Dlog4j.configuration=db_patch.log4j.xml -cp %s org.apache.ranger.patch.cliutil.%s %s %s %s"%(self.JAVA_BIN,ranger_log,path,className,userName,oldPassword,newPassword)
if os_name == "LINUX":
ret = subprocess.call(shlex.split(get_java_cmd))
elif os_name == "WINDOWS":
ret = subprocess.call(get_java_cmd)
if ret == 0:
if os_name == "LINUX":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Ranger admin default password change request processed successfully..","info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] Ranger admin default password change request failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\"" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] Ranger admin default password change request failed", "error")
sys.exit(1)
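# Microsoft SQL Server flavor: same workflow as the other BaseDB subclasses,
# but jisql uses the mssql driver and the bookkeeping queries use GETDATE()
# instead of current_timestamp.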
class SqlServerConf(BaseDB):
# Constructor
def __init__(self, host, SQL_CONNECTOR_JAR, JAVA_BIN):
self.host = host
self.SQL_CONNECTOR_JAR = SQL_CONNECTOR_JAR
self.JAVA_BIN = JAVA_BIN
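	# Builds the jisql command line for SQL Server. On LINUX the result looks
	# roughly like this (illustrative values only):
	#   <java> -cp <connector.jar>:<RANGER_ADMIN_HOME>/jisql/lib/* \
	#       org.apache.util.sql.Jisql -user <user> -p '<password>' -driver mssql \
	#       -cstring jdbc:sqlserver://<host>\;databaseName=<db> -noheader -trim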
def get_jisql_cmd(self, user, password, db_name):
#TODO: User array for forming command
path = RANGER_ADMIN_HOME
self.JAVA_BIN = self.JAVA_BIN.strip("'")
if os_name == "LINUX":
jisql_cmd = "%s -cp %s:%s/jisql/lib/* org.apache.util.sql.Jisql -user %s -p '%s' -driver mssql -cstring jdbc:sqlserver://%s\\;databaseName=%s -noheader -trim"%(self.JAVA_BIN, self.SQL_CONNECTOR_JAR, path, user, password, self.host,db_name)
elif os_name == "WINDOWS":
jisql_cmd = "%s -cp %s;%s\\jisql\\lib\\* org.apache.util.sql.Jisql -user %s -p \"%s\" -driver mssql -cstring jdbc:sqlserver://%s;databaseName=%s -noheader -trim"%(self.JAVA_BIN, self.SQL_CONNECTOR_JAR, path, user, password, self.host,db_name)
return jisql_cmd
def check_connection(self, db_name, db_user, db_password):
log("[I] Checking connection", "info")
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"SELECT 1;\""
elif os_name == "WINDOWS":
query = get_cmd + " -query \"SELECT 1;\" -c ;"
jisql_log(query, db_password)
output = check_output(query)
if output.strip('1 |'):
log("[I] Connection success", "info")
return True
else:
log("[E] Can't establish connection", "error")
sys.exit(1)
def import_db_file(self, db_name, db_user, db_password, file_name):
name = basename(file_name)
if os.path.isfile(file_name):
log("[I] Importing db schema to database " + db_name + " from file: " + name,"info")
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] "+name + " DB schema imported successfully","info")
else:
log("[E] "+name + " DB Schema import failed!","error")
sys.exit(1)
def check_table(self, db_name, db_user, db_password, TABLE_NAME):
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"SELECT TABLE_NAME FROM information_schema.tables where table_name = '%s';\"" %(TABLE_NAME)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"SELECT TABLE_NAME FROM information_schema.tables where table_name = '%s';\" -c ;" %(TABLE_NAME)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(TABLE_NAME + " |"):
log("[I] Table '" + TABLE_NAME + "' already exists in database '" + db_name + "'","info")
return True
else:
log("[I] Table '" + TABLE_NAME + "' does not exist in database '" + db_name + "'","info")
return False
def grant_audit_db_user(self, audit_db_name, db_user, audit_db_user, db_password, audit_db_password,TABLE_NAME):
log("[I] Granting permission to audit user '" + audit_db_user + "' on db '" + audit_db_name + "'","info")
get_cmd = self.get_jisql_cmd(db_user, db_password,audit_db_name)
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"USE %s GRANT SELECT,INSERT to %s;\"" %(audit_db_name ,audit_db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"USE %s GRANT SELECT,INSERT to %s;\" -c ;" %(audit_db_name ,audit_db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret != 0 :
sys.exit(1)
else:
log("[I] Permission granted to audit user " + audit_db_user , "info")
def import_db_patches(self, db_name, db_user, db_password, file_name):
name = basename(file_name)
if os.path.isfile(file_name):
version = name.split('-')[0]
log("[I] Executing patch on " + db_name + " from file: " + name,"info")
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Patch "+ name +" is already applied" ,"info")
else:
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"select version from x_db_version_h where version = '%s' and active = 'N';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Patch "+ name +" is being applied by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c \;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c ;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Patch "+ name +" is being applied..","info")
else:
log("[E] Patch "+ name +" failed", "error")
if os_name == "LINUX":
query = get_cmd + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] "+name + " patch applied","info")
if os_name == "LINUX":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] Patch version updated", "info")
else:
log("[E] Updating patch version failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] "+name + " import failed!","error")
sys.exit(1)
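	# Audit patches use two jisql commands: get_cmd1 (built by the admin-db
	# object) targets the database that owns x_db_version_h, while get_cmd2
	# targets the audit database where the patch file is executed.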
def import_auditdb_patches(self, xa_sqlObj,xa_db_host, audit_db_host, db_name, audit_db_name, db_user, audit_db_user, db_password, audit_db_password, file_name, TABLE_NAME):
log("[I] --------- Checking XA_ACCESS_AUDIT table to apply audit db patches --------- ","info")
output = self.check_table(audit_db_name, db_user, db_password, TABLE_NAME)
if output == True:
name = basename(file_name)
if os.path.isfile(file_name):
version = name.split('-')[0]
log("[I] Executing patch on " + audit_db_name + " from file: " + name,"info")
get_cmd1 = xa_sqlObj.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd1 + " -c \; query \"select version from x_db_version_h where version = '%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Patch "+ name +" is already applied" ,"info")
else:
if os_name == "LINUX":
query = get_cmd1 + " -c \; query \"select version from x_db_version_h where version = '%s' and active = 'N';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Patch "+ name +" is being applied by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
get_cmd2 = self.get_jisql_cmd(db_user, db_password, audit_db_name)
if os_name == "LINUX":
query = get_cmd1 + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c \;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c ;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Patch "+ name +" is being applied..","info")
else:
log("[E] Patch "+ name +" failed", "error")
if os_name == "LINUX":
query = get_cmd2 + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd2 + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] "+name + " patch applied","info")
if os_name == "LINUX":
query = get_cmd1 + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] Patch version updated", "info")
else:
log("[E] Updating patch version failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd1 + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] "+name + " import failed!","error")
sys.exit(1)
else:
log("[I] Table XA_ACCESS_AUDIT does not exists in " +audit_db_name,"error")
sys.exit(1)
def auditdb_operation(self, xa_db_host, audit_db_host, db_name, audit_db_name,db_user, audit_db_user, db_password, audit_db_password, file_name, TABLE_NAME):
log("[I] --------- Check admin user connection --------- ","info")
self.check_connection(audit_db_name, db_user, db_password)
log("[I] --------- Check audit user connection --------- ","info")
self.check_connection(audit_db_name, audit_db_user, audit_db_password)
log("[I] --------- Check audit table exists --------- ","info")
output = self.check_table(audit_db_name, db_user, db_password, TABLE_NAME)
if output == False:
self.import_db_file(audit_db_name ,db_user, db_password, file_name)
self.grant_audit_db_user( audit_db_name ,db_user, audit_db_user, db_password,audit_db_password,TABLE_NAME)
def execute_java_patches(self, xa_db_host, db_user, db_password, db_name):
my_dict = {}
version = ""
className = ""
app_home = os.path.join(RANGER_ADMIN_HOME,"ews","webapp")
ranger_log = os.path.join(RANGER_ADMIN_HOME,"ews","logs")
javaFiles = os.path.join(app_home,"WEB-INF","classes","org","apache","ranger","patch")
if not os.path.exists(javaFiles):
log("[I] No java patches to apply!","info")
else:
files = os.listdir(javaFiles)
if files:
for filename in files:
f = re.match("^Patch.*?.class$",filename)
if f:
className = re.match("(Patch.*?)_.*.class",filename)
className = className.group(1)
version = re.match("Patch.*?_(.*).class",filename)
version = version.group(1)
key3 = int(version.strip("J"))
my_dict[key3] = filename
keylist = my_dict.keys()
keylist.sort()
for key in keylist:
#print "%s: %s" % (key, my_dict[key])
version = str(key)
className = my_dict[key]
					className = className[:-len(".class")]  # drop the extension; str.strip(".class") could eat matching edge characters
if version != "":
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'Y';\" -c \;" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Java patch "+ className +" is already applied" ,"info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'N';\" -c \;" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Java patch "+ className +" is being applied by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('J%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c \;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('J%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c ;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] java patch "+ className +" is being applied..","info")
else:
log("[E] java patch "+ className +" failed", "error")
sys.exit(1)
if os_name == "LINUX":
path = os.path.join("%s","WEB-INF","classes","conf:%s","WEB-INF","classes","lib","*:%s","WEB-INF",":%s","META-INF",":%s","WEB-INF","lib","*:%s","WEB-INF","classes",":%s","WEB-INF","classes","META-INF:%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
elif os_name == "WINDOWS":
path = os.path.join("%s","WEB-INF","classes","conf;%s","WEB-INF","classes","lib","*;%s","WEB-INF",";%s","META-INF",";%s","WEB-INF","lib","*;%s","WEB-INF","classes",";%s","WEB-INF","classes","META-INF;%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
get_java_cmd = "%s -Dlogdir=%s -Dlog4j.configuration=db_patch.log4j.xml -cp %s org.apache.ranger.patch.%s"%(self.JAVA_BIN,ranger_log,path,className)
if os_name == "LINUX":
ret = subprocess.call(shlex.split(get_java_cmd))
elif os_name == "WINDOWS":
ret = subprocess.call(get_java_cmd)
if ret == 0:
if os_name == "LINUX":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='J%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='J%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] java patch "+ className +" is applied..","info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] java patch "+ className +" failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] applying java patch "+ className +" failed", "error")
sys.exit(1)
def change_admin_default_password(self, xa_db_host, db_user, db_password, db_name,userName,oldPassword,newPassword):
my_dict = {}
version = ""
className = "ChangePasswordUtil"
version = 'DEFAULT_ADMIN_UPDATE'
app_home = os.path.join(RANGER_ADMIN_HOME,"ews","webapp")
ranger_log = os.path.join(RANGER_ADMIN_HOME,"ews","logs")
filePath = os.path.join(app_home,"WEB-INF","classes","org","apache","ranger","patch","cliutil","ChangePasswordUtil.class")
if os.path.exists(filePath):
if version != "":
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c \;" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Ranger admin default password has already been changed!!","info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c \;" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Ranger Password change utility is being executed by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c \;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c ;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Ranger admin default password change request is in process..","info")
else:
log("[E] Ranger admin default password change request failed", "error")
sys.exit(1)
if os_name == "LINUX":
path = os.path.join("%s","WEB-INF","classes","conf:%s","WEB-INF","classes","lib","*:%s","WEB-INF",":%s","META-INF",":%s","WEB-INF","lib","*:%s","WEB-INF","classes",":%s","WEB-INF","classes","META-INF:%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
elif os_name == "WINDOWS":
path = os.path.join("%s","WEB-INF","classes","conf;%s","WEB-INF","classes","lib","*;%s","WEB-INF",";%s","META-INF",";%s","WEB-INF","lib","*;%s","WEB-INF","classes",";%s","WEB-INF","classes","META-INF;%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
get_java_cmd = "%s -Dlogdir=%s -Dlog4j.configuration=db_patch.log4j.xml -cp %s org.apache.ranger.patch.cliutil.%s %s %s %s"%(self.JAVA_BIN,ranger_log,path,className,userName,oldPassword,newPassword)
if os_name == "LINUX":
ret = subprocess.call(shlex.split(get_java_cmd))
elif os_name == "WINDOWS":
ret = subprocess.call(get_java_cmd)
if ret == 0:
if os_name == "LINUX":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Ranger admin default password change request processed successfully..","info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] Ranger admin default password change request failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] Ranger admin default password change request failed", "error")
sys.exit(1)
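# SAP SQL Anywhere flavor: same bookkeeping as the other flavors, plus
# set_options() to relax server limits (reserved keywords, statement and
# cursor counts) before the schema scripts run.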
class SqlAnywhereConf(BaseDB):
# Constructor
def __init__(self, host, SQL_CONNECTOR_JAR, JAVA_BIN):
self.host = host
self.SQL_CONNECTOR_JAR = SQL_CONNECTOR_JAR
self.JAVA_BIN = JAVA_BIN
def get_jisql_cmd(self, user, password, db_name):
path = RANGER_ADMIN_HOME
self.JAVA_BIN = self.JAVA_BIN.strip("'")
if os_name == "LINUX":
jisql_cmd = "%s -cp %s:%s/jisql/lib/* org.apache.util.sql.Jisql -user %s -password '%s' -driver sapsajdbc4 -cstring jdbc:sqlanywhere:database=%s;host=%s -noheader -trim"%(self.JAVA_BIN, self.SQL_CONNECTOR_JAR, path,user, password,db_name,self.host)
elif os_name == "WINDOWS":
jisql_cmd = "%s -cp %s;%s\\jisql\\lib\\* org.apache.util.sql.Jisql -user %s -password '%s' -driver sapsajdbc4 -cstring jdbc:sqlanywhere:database=%s;host=%s -noheader -trim"%(self.JAVA_BIN, self.SQL_CONNECTOR_JAR, path, user, password,db_name,self.host)
return jisql_cmd
def check_connection(self, db_name, db_user, db_password):
log("[I] Checking connection", "info")
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"SELECT 1;\""
elif os_name == "WINDOWS":
query = get_cmd + " -query \"SELECT 1;\" -c ;"
jisql_log(query, db_password)
output = check_output(query)
if output.strip('1 |'):
log("[I] Connection success", "info")
return True
else:
log("[E] Can't establish connection", "error")
sys.exit(1)
def import_db_file(self, db_name, db_user, db_password, file_name):
name = basename(file_name)
if os.path.isfile(file_name):
log("[I] Importing db schema to database " + db_name + " from file: " + name,"info")
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] "+name + " DB schema imported successfully","info")
else:
log("[E] "+name + " DB Schema import failed!","error")
sys.exit(1)
def check_table(self, db_name, db_user, db_password, TABLE_NAME):
self.set_options(db_name, db_user, db_password, TABLE_NAME)
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"SELECT name FROM sysobjects where name = '%s' and type='U';\"" %(TABLE_NAME)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"SELECT name FROM sysobjects where name = '%s' and type='U';\" -c ;" %(TABLE_NAME)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(TABLE_NAME + " |"):
log("[I] Table '" + TABLE_NAME + "' already exists in database '" + db_name + "'","info")
return True
else:
log("[I] Table '" + TABLE_NAME + "' does not exist in database '" + db_name + "'","info")
return False
def grant_audit_db_user(self, audit_db_name, db_user, audit_db_user, db_password, audit_db_password,TABLE_NAME):
log("[I] Granting permission to audit user '" + audit_db_user + "' on db '" + audit_db_name + "'","info")
get_cmd = self.get_jisql_cmd(db_user, db_password,audit_db_name)
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"GRANT INSERT ON XA_ACCESS_AUDIT to %s;\"" %(audit_db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"GRANT INSERT ON XA_ACCESS_AUDIT to %s;\" -c ;" %(audit_db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret != 0 :
sys.exit(1)
else:
log("[I] Permission granted to audit user " + audit_db_user , "info")
def import_db_patches(self, db_name, db_user, db_password, file_name):
name = basename(file_name)
if os.path.isfile(file_name):
version = name.split('-')[0]
log("[I] Executing patch on " + db_name + " from file: " + name,"info")
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Patch "+ name +" is already applied" ,"info")
else:
if os_name == "LINUX":
query = get_cmd + " -c \; -query \"select version from x_db_version_h where version = '%s' and active = 'N';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while output.strip(version + " |"):
log("[I] Patch "+ name +" is being applied by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c \;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c ;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Patch "+ name +" is being applied..","info")
else:
log("[E] Patch "+ name +" failed", "error")
if os_name == "LINUX":
query = get_cmd + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] "+name + " patch applied","info")
if os_name == "LINUX":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] Patch version updated", "info")
else:
log("[E] Updating patch version failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] "+name + " import failed!","error")
sys.exit(1)
def import_auditdb_patches(self, xa_sqlObj,xa_db_host, audit_db_host, db_name, audit_db_name, db_user, audit_db_user, db_password, audit_db_password, file_name, TABLE_NAME):
log("[I] --------- Checking XA_ACCESS_AUDIT table to apply audit db patches --------- ","info")
output = self.check_table(audit_db_name, db_user, db_password, TABLE_NAME)
if output == True:
name = basename(file_name)
if os.path.isfile(file_name):
version = name.split('-')[0]
log("[I] Executing patch on " + audit_db_name + " from file: " + name,"info")
get_cmd1 = xa_sqlObj.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd1 + " -c \; -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Patch "+ name +" is already applied" ,"info")
else:
if os_name == "LINUX":
query = get_cmd1 + " -c \; -query \"select version from x_db_version_h where version = '%s' and active = 'N';\"" %(version)
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while output.strip(version + " |"):
log("[I] Patch "+ name +" is being applied by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd1 + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c \;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c ;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Patch "+ name +" is being applied..","info")
else:
log("[E] Patch "+ name +" failed", "error")
get_cmd2 = self.get_jisql_cmd(db_user, db_password, audit_db_name)
if os_name == "LINUX":
query = get_cmd2 + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd2 + " -input %s" %file_name
jisql_log(query, db_password)
ret = subprocess.call(query)
						if ret == 0:
							log("[I] "+name + " patch applied","info")
							if os_name == "LINUX":
								query = get_cmd1 + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd1 + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log("[I] Patch version updated", "info")
else:
log("[E] Updating patch version failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] "+name + " import failed!","error")
sys.exit(1)
else:
log("[I] Table XA_ACCESS_AUDIT does not exists in " +audit_db_name,"error")
sys.exit(1)
def auditdb_operation(self, xa_db_host, audit_db_host, db_name, audit_db_name,db_user, audit_db_user, db_password, audit_db_password, file_name, TABLE_NAME):
log("[I] --------- Check admin user connection --------- ","info")
self.check_connection(audit_db_name, db_user, db_password)
log("[I] --------- Check audit user connection --------- ","info")
self.check_connection(audit_db_name, audit_db_user, audit_db_password)
log("[I] --------- Check audit table exists --------- ","info")
output = self.check_table(audit_db_name, db_user, db_password, TABLE_NAME)
if output == False:
self.import_db_file(audit_db_name ,db_user, db_password, file_name)
self.grant_audit_db_user( audit_db_name ,db_user, audit_db_user, db_password,audit_db_password,TABLE_NAME)
def execute_java_patches(self, xa_db_host, db_user, db_password, db_name):
my_dict = {}
version = ""
className = ""
app_home = os.path.join(RANGER_ADMIN_HOME,"ews","webapp")
ranger_log = os.path.join(RANGER_ADMIN_HOME,"ews","logs")
javaFiles = os.path.join(app_home,"WEB-INF","classes","org","apache","ranger","patch")
if not os.path.exists(javaFiles):
log("[I] No java patches to apply!","info")
else:
files = os.listdir(javaFiles)
if files:
for filename in files:
f = re.match("^Patch.*?.class$",filename)
if f:
className = re.match("(Patch.*?)_.*.class",filename)
className = className.group(1)
version = re.match("Patch.*?_(.*).class",filename)
version = version.group(1)
key3 = int(version.strip("J"))
my_dict[key3] = filename
keylist = my_dict.keys()
keylist.sort()
for key in keylist:
#print "%s: %s" % (key, my_dict[key])
version = str(key)
className = my_dict[key]
					className = className[:-len(".class")]  # drop the extension; str.strip(".class") could eat matching edge characters
if version != "":
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'Y';\" -c \;" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Java patch "+ className +" is already applied" ,"info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'N';\" -c \;" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = 'J%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Java patch "+ className +" is being applied by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('J%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c \;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('J%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c ;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] java patch "+ className +" is being applied..","info")
else:
log("[E] java patch "+ className +" failed", "error")
sys.exit(1)
if os_name == "LINUX":
path = os.path.join("%s","WEB-INF","classes","conf:%s","WEB-INF","classes","lib","*:%s","WEB-INF",":%s","META-INF",":%s","WEB-INF","lib","*:%s","WEB-INF","classes",":%s","WEB-INF","classes","META-INF:%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
elif os_name == "WINDOWS":
path = os.path.join("%s","WEB-INF","classes","conf;%s","WEB-INF","classes","lib","*;%s","WEB-INF",";%s","META-INF",";%s","WEB-INF","lib","*;%s","WEB-INF","classes",";%s","WEB-INF","classes","META-INF;%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
get_java_cmd = "%s -Dlogdir=%s -Dlog4j.configuration=db_patch.log4j.xml -cp %s org.apache.ranger.patch.%s"%(self.JAVA_BIN,ranger_log,path,className)
if os_name == "LINUX":
ret = subprocess.call(shlex.split(get_java_cmd))
elif os_name == "WINDOWS":
ret = subprocess.call(get_java_cmd)
if ret == 0:
if os_name == "LINUX":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='J%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='J%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] java patch "+ className +" is applied..","info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] java patch "+ className +" failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='J%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] applying java patch "+ className +" failed", "error")
sys.exit(1)
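	# Session options that must be set before running the SQL Anywhere
	# scripts; each option is applied through its own jisql call.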
	def set_options(self, db_name, db_user, db_password, TABLE_NAME):
		get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
		option_queries = ["set option public.reserved_keywords='LIMIT';", "set option public.max_statement_count=0;", "set option public.max_cursor_count=0;"]
		for option_query in option_queries:
			if os_name == "LINUX":
				query = get_cmd + " -c \; -query \"%s\"" %(option_query)
				jisql_log(query, db_password)
				ret = subprocess.call(shlex.split(query))
			elif os_name == "WINDOWS":
				# Pass the raw command string on Windows, matching the other methods.
				query = get_cmd + " -query \"%s\" -c ;" %(option_query)
				jisql_log(query, db_password)
				ret = subprocess.call(query)
def change_admin_default_password(self, xa_db_host, db_user, db_password, db_name,userName,oldPassword,newPassword):
my_dict = {}
version = ""
className = "ChangePasswordUtil"
version = 'DEFAULT_ADMIN_UPDATE'
app_home = os.path.join(RANGER_ADMIN_HOME,"ews","webapp")
ranger_log = os.path.join(RANGER_ADMIN_HOME,"ews","logs")
filePath = os.path.join(app_home,"WEB-INF","classes","org","apache","ranger","patch","cliutil","ChangePasswordUtil.class")
if os.path.exists(filePath):
if version != "":
get_cmd = self.get_jisql_cmd(db_user, db_password, db_name)
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c \;" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'Y';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
log("[I] Ranger admin default password has already been changed!!","info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c \;" %(version)
elif os_name == "WINDOWS":
query = get_cmd + " -query \"select version from x_db_version_h where version = '%s' and active = 'N';\" -c ;" %(version)
jisql_log(query, db_password)
output = check_output(query)
if output.strip(version + " |"):
while(output.strip(version + " |")):
log("[I] Ranger Password change utility is being executed by some other process" ,"info")
time.sleep(300)
jisql_log(query, db_password)
output = check_output(query)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c \;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"insert into x_db_version_h (version, inst_at, inst_by, updated_at, updated_by,active) values ('%s', GETDATE(), '%s', GETDATE(), '%s','N') ;\" -c ;" %(version,db_user,db_user)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Ranger admin default password change request is in process..","info")
else:
log("[E] Ranger admin default password change request failed", "error")
sys.exit(1)
if os_name == "LINUX":
path = os.path.join("%s","WEB-INF","classes","conf:%s","WEB-INF","classes","lib","*:%s","WEB-INF",":%s","META-INF",":%s","WEB-INF","lib","*:%s","WEB-INF","classes",":%s","WEB-INF","classes","META-INF:%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
elif os_name == "WINDOWS":
path = os.path.join("%s","WEB-INF","classes","conf;%s","WEB-INF","classes","lib","*;%s","WEB-INF",";%s","META-INF",";%s","WEB-INF","lib","*;%s","WEB-INF","classes",";%s","WEB-INF","classes","META-INF;%s" )%(app_home ,app_home ,app_home, app_home, app_home, app_home ,app_home ,self.SQL_CONNECTOR_JAR)
get_java_cmd = "%s -Dlogdir=%s -Dlog4j.configuration=db_patch.log4j.xml -cp %s org.apache.ranger.patch.cliutil.%s %s %s %s"%(self.JAVA_BIN,ranger_log,path,className,userName,oldPassword,newPassword)
if os_name == "LINUX":
ret = subprocess.call(shlex.split(get_java_cmd))
elif os_name == "WINDOWS":
ret = subprocess.call(get_java_cmd)
if ret == 0:
if os_name == "LINUX":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"update x_db_version_h set active='Y' where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
if ret == 0:
log ("[I] Ranger admin default password change request processed successfully..","info")
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] Ranger admin default password change request failed", "error")
sys.exit(1)
else:
if os_name == "LINUX":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c \;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(shlex.split(query))
elif os_name == "WINDOWS":
query = get_cmd + " -query \"delete from x_db_version_h where version='%s' and active='N';\" -c ;" %(version)
jisql_log(query, db_password)
ret = subprocess.call(query)
log("[E] Ranger admin default password change request failed", "error")
sys.exit(1)
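# Entry point: resolves JAVA_BIN, reads the DB flavor, credentials and SQL
# file locations from the install properties (globalDict), then dispatches
# to the matching *Conf class.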
def main(argv):
populate_global_dict()
FORMAT = '%(asctime)-15s %(message)s'
logging.basicConfig(format=FORMAT, level=logging.DEBUG)
	if os.environ.get('JAVA_HOME', '') == "":
log("[E] ---------- JAVA_HOME environment property not defined, aborting installation. ----------", "error")
sys.exit(1)
else:
JAVA_BIN=os.path.join(os.environ['JAVA_HOME'],'bin','java')
if os_name == "WINDOWS" :
JAVA_BIN = JAVA_BIN+'.exe'
if os.path.isfile(JAVA_BIN):
pass
else:
JAVA_BIN=globalDict['JAVA_BIN']
if os.path.isfile(JAVA_BIN):
pass
else:
log("[E] ---------- JAVA Not Found, aborting installation. ----------", "error")
sys.exit(1)
XA_DB_FLAVOR=globalDict['DB_FLAVOR']
AUDIT_DB_FLAVOR=globalDict['DB_FLAVOR']
XA_DB_FLAVOR = XA_DB_FLAVOR.upper()
AUDIT_DB_FLAVOR = AUDIT_DB_FLAVOR.upper()
log("[I] DB FLAVOR :" + XA_DB_FLAVOR ,"info")
xa_db_host = globalDict['db_host']
audit_db_host = globalDict['db_host']
mysql_dbversion_catalog = os.path.join('db','mysql','create_dbversion_catalog.sql')
mysql_core_file = globalDict['mysql_core_file']
mysql_audit_file = globalDict['mysql_audit_file']
mysql_patches = os.path.join('db','mysql','patches')
mysql_auditdb_patches = os.path.join('db','mysql','patches','audit')
oracle_dbversion_catalog = os.path.join('db','oracle','create_dbversion_catalog.sql')
oracle_core_file = globalDict['oracle_core_file']
oracle_audit_file = globalDict['oracle_audit_file']
oracle_patches = os.path.join('db','oracle','patches')
oracle_auditdb_patches = os.path.join('db','oracle','patches','audit')
postgres_dbversion_catalog = os.path.join('db','postgres','create_dbversion_catalog.sql')
postgres_core_file = globalDict['postgres_core_file']
postgres_audit_file = globalDict['postgres_audit_file']
postgres_patches = os.path.join('db','postgres','patches')
postgres_auditdb_patches = os.path.join('db','postgres','patches','audit')
sqlserver_dbversion_catalog = os.path.join('db','sqlserver','create_dbversion_catalog.sql')
sqlserver_core_file = globalDict['sqlserver_core_file']
sqlserver_audit_file = globalDict['sqlserver_audit_file']
sqlserver_patches = os.path.join('db','sqlserver','patches')
sqlserver_auditdb_patches = os.path.join('db','sqlserver','patches','audit')
sqlanywhere_dbversion_catalog = os.path.join('db','sqlanywhere','create_dbversion_catalog.sql')
sqlanywhere_core_file = globalDict['sqlanywhere_core_file']
sqlanywhere_audit_file = globalDict['sqlanywhere_audit_file']
sqlanywhere_patches = os.path.join('db','sqlanywhere','patches')
sqlanywhere_auditdb_patches = os.path.join('db','sqlanywhere','patches','audit')
db_name = globalDict['db_name']
db_user = globalDict['db_user']
db_password = globalDict['db_password']
x_db_version = 'x_db_version_h'
xa_access_audit = 'xa_access_audit'
x_user = 'x_portal_user'
audit_db_name=''
audit_db_user=''
audit_db_password=''
audit_store = None
if 'audit_store' in globalDict:
audit_store = globalDict['audit_store']
audit_store=audit_store.lower()
if audit_store =='db':
if 'audit_db_name' in globalDict:
audit_db_name = globalDict['audit_db_name']
if 'audit_db_user' in globalDict:
audit_db_user = globalDict['audit_db_user']
if 'audit_db_password' in globalDict:
audit_db_password = globalDict['audit_db_password']
if XA_DB_FLAVOR == "MYSQL":
MYSQL_CONNECTOR_JAR=globalDict['SQL_CONNECTOR_JAR']
xa_sqlObj = MysqlConf(xa_db_host, MYSQL_CONNECTOR_JAR, JAVA_BIN)
xa_db_version_file = os.path.join(RANGER_ADMIN_HOME , mysql_dbversion_catalog)
xa_db_core_file = os.path.join(RANGER_ADMIN_HOME , mysql_core_file)
xa_patch_file = os.path.join(RANGER_ADMIN_HOME ,mysql_patches)
audit_patch_file = os.path.join(RANGER_ADMIN_HOME ,mysql_auditdb_patches)
elif XA_DB_FLAVOR == "ORACLE":
ORACLE_CONNECTOR_JAR=globalDict['SQL_CONNECTOR_JAR']
xa_sqlObj = OracleConf(xa_db_host, ORACLE_CONNECTOR_JAR, JAVA_BIN)
xa_db_version_file = os.path.join(RANGER_ADMIN_HOME ,oracle_dbversion_catalog)
xa_db_core_file = os.path.join(RANGER_ADMIN_HOME ,oracle_core_file)
xa_patch_file = os.path.join(RANGER_ADMIN_HOME ,oracle_patches)
audit_patch_file = os.path.join(RANGER_ADMIN_HOME ,oracle_auditdb_patches)
elif XA_DB_FLAVOR == "POSTGRES":
db_user=db_user.lower()
db_name=db_name.lower()
POSTGRES_CONNECTOR_JAR = globalDict['SQL_CONNECTOR_JAR']
xa_sqlObj = PostgresConf(xa_db_host, POSTGRES_CONNECTOR_JAR, JAVA_BIN)
xa_db_version_file = os.path.join(RANGER_ADMIN_HOME , postgres_dbversion_catalog)
xa_db_core_file = os.path.join(RANGER_ADMIN_HOME , postgres_core_file)
xa_patch_file = os.path.join(RANGER_ADMIN_HOME , postgres_patches)
audit_patch_file = os.path.join(RANGER_ADMIN_HOME ,postgres_auditdb_patches)
elif XA_DB_FLAVOR == "MSSQL":
SQLSERVER_CONNECTOR_JAR = globalDict['SQL_CONNECTOR_JAR']
xa_sqlObj = SqlServerConf(xa_db_host, SQLSERVER_CONNECTOR_JAR, JAVA_BIN)
xa_db_version_file = os.path.join(RANGER_ADMIN_HOME ,sqlserver_dbversion_catalog)
xa_db_core_file = os.path.join(RANGER_ADMIN_HOME , sqlserver_core_file)
xa_patch_file = os.path.join(RANGER_ADMIN_HOME , sqlserver_patches)
audit_patch_file = os.path.join(RANGER_ADMIN_HOME ,sqlserver_auditdb_patches)
elif XA_DB_FLAVOR == "SQLA":
if not os_name == "WINDOWS" :
if os.environ['LD_LIBRARY_PATH'] == "":
log("[E] ---------- LD_LIBRARY_PATH environment property not defined, aborting installation. ----------", "error")
sys.exit(1)
SQLANYWHERE_CONNECTOR_JAR = globalDict['SQL_CONNECTOR_JAR']
xa_sqlObj = SqlAnywhereConf(xa_db_host, SQLANYWHERE_CONNECTOR_JAR, JAVA_BIN)
xa_db_version_file = os.path.join(RANGER_ADMIN_HOME ,sqlanywhere_dbversion_catalog)
xa_db_core_file = os.path.join(RANGER_ADMIN_HOME , sqlanywhere_core_file)
xa_patch_file = os.path.join(RANGER_ADMIN_HOME , sqlanywhere_patches)
audit_patch_file = os.path.join(RANGER_ADMIN_HOME ,sqlanywhere_auditdb_patches)
else:
log("[E] --------- NO SUCH SUPPORTED DB FLAVOUR!! ---------", "error")
sys.exit(1)
if AUDIT_DB_FLAVOR == "MYSQL":
MYSQL_CONNECTOR_JAR=globalDict['SQL_CONNECTOR_JAR']
audit_sqlObj = MysqlConf(audit_db_host,MYSQL_CONNECTOR_JAR,JAVA_BIN)
audit_db_file = os.path.join(RANGER_ADMIN_HOME ,mysql_audit_file)
elif AUDIT_DB_FLAVOR == "ORACLE":
ORACLE_CONNECTOR_JAR=globalDict['SQL_CONNECTOR_JAR']
audit_sqlObj = OracleConf(audit_db_host, ORACLE_CONNECTOR_JAR, JAVA_BIN)
audit_db_file = os.path.join(RANGER_ADMIN_HOME , oracle_audit_file)
elif AUDIT_DB_FLAVOR == "POSTGRES":
audit_db_user=audit_db_user.lower()
audit_db_name=audit_db_name.lower()
POSTGRES_CONNECTOR_JAR = globalDict['SQL_CONNECTOR_JAR']
audit_sqlObj = PostgresConf(audit_db_host, POSTGRES_CONNECTOR_JAR, JAVA_BIN)
audit_db_file = os.path.join(RANGER_ADMIN_HOME , postgres_audit_file)
elif AUDIT_DB_FLAVOR == "MSSQL":
SQLSERVER_CONNECTOR_JAR = globalDict['SQL_CONNECTOR_JAR']
audit_sqlObj = SqlServerConf(audit_db_host, SQLSERVER_CONNECTOR_JAR, JAVA_BIN)
audit_db_file = os.path.join(RANGER_ADMIN_HOME , sqlserver_audit_file)
elif AUDIT_DB_FLAVOR == "SQLA":
SQLANYWHERE_CONNECTOR_JAR = globalDict['SQL_CONNECTOR_JAR']
audit_sqlObj = SqlAnywhereConf(audit_db_host, SQLANYWHERE_CONNECTOR_JAR, JAVA_BIN)
audit_db_file = os.path.join(RANGER_ADMIN_HOME , sqlanywhere_audit_file)
else:
log("[E] --------- NO SUCH SUPPORTED DB FLAVOUR!! ---------", "error")
sys.exit(1)
log("[I] --------- Verifying Ranger DB connection ---------","info")
xa_sqlObj.check_connection(db_name, db_user, db_password)
if len(argv)==1:
log("[I] --------- Verifying Ranger DB tables ---------","info")
if xa_sqlObj.check_table(db_name, db_user, db_password, x_user):
pass
else:
log("[I] --------- Importing Ranger Core DB Schema ---------","info")
xa_sqlObj.import_db_file(db_name, db_user, db_password, xa_db_core_file)
if XA_DB_FLAVOR == "ORACLE":
if xa_sqlObj.check_table(db_name, db_user, db_password, xa_access_audit):
if audit_db_user != "" and db_user != audit_db_user:
xa_sqlObj.create_synonym(db_name, db_user, db_password,audit_db_user)
log("[I] --------- Verifying upgrade history table ---------","info")
output = xa_sqlObj.check_table(db_name, db_user, db_password, x_db_version)
if output == False:
log("[I] --------- Creating version history table ---------","info")
xa_sqlObj.upgrade_db(db_name, db_user, db_password, xa_db_version_file)
log("[I] --------- Applying Ranger DB patches ---------","info")
xa_sqlObj.apply_patches(db_name, db_user, db_password, xa_patch_file)
if audit_store == "db" and audit_db_password!='':
log("[I] --------- Starting Audit Operation ---------","info")
audit_sqlObj.auditdb_operation(xa_db_host, audit_db_host, db_name, audit_db_name, db_user, audit_db_user, db_password, audit_db_password, audit_db_file, xa_access_audit)
log("[I] --------- Applying Audit DB patches ---------","info")
audit_sqlObj.apply_auditdb_patches(xa_sqlObj,xa_db_host, audit_db_host, db_name, audit_db_name, db_user, audit_db_user, db_password, audit_db_password, audit_patch_file, xa_access_audit)
if len(argv)>1:
for i in range(len(argv)):
if str(argv[i]) == "-javapatch":
xa_sqlObj.execute_java_patches(xa_db_host, db_user, db_password, db_name)
if str(argv[i]) == "-changepassword":
if len(argv)==5:
userName=argv[2]
oldPassword=argv[3]
newPassword=argv[4]
if oldPassword==newPassword:
log("[E] Old Password and New Password argument are same. Exiting!!", "error")
sys.exit(1)
if userName != "" and oldPassword != "" and newPassword != "":
password_validation(newPassword)
xa_sqlObj.change_admin_default_password(xa_db_host, db_user, db_password, db_name,userName,oldPassword,newPassword)
else:
log("[E] Invalid argument list.", "error")
log("[I] Usage : python db_setup.py -changepassword <loginID> <currentPassword> <newPassword>","info")
sys.exit(1)
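# Invocation sketch (argument handling exactly as implemented in main() above):
#   python db_setup.py                      -> verify/create schema and apply DB patches
#   python db_setup.py -javapatch           -> run the Java patch utilities
#   python db_setup.py -changepassword <loginID> <currentPassword> <newPassword>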
main(sys.argv)
| 51.403713 | 308 | 0.64032 | 20,457 | 141,206 | 4.187222 | 0.021264 | 0.053001 | 0.04037 | 0.046581 | 0.940076 | 0.933503 | 0.924759 | 0.913411 | 0.903652 | 0.898784 | 0 | 0.003329 | 0.193625 | 141,206 | 2,746 | 309 | 51.422433 | 0.748948 | 0.009461 | 0 | 0.853668 | 0 | 0.007722 | 0.196633 | 0.012307 | 0 | 0 | 0 | 0.000364 | 0 | 1 | 0.028571 | false | 0.197297 | 0.025869 | 0 | 0.064865 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
a2b9441cfd51ab3d5a5060e1b9ec9024c654cb3e | 158 | py | Python | lib/buzzer.py | xxh160/pithon | 712dca6affd139176f9bba4efefcb3c5618291ba | [
"MIT"
] | null | null | null | lib/buzzer.py | xxh160/pithon | 712dca6affd139176f9bba4efefcb3c5618291ba | [
"MIT"
] | null | null | null | lib/buzzer.py | xxh160/pithon | 712dca6affd139176f9bba4efefcb3c5618291ba | [
"MIT"
] | null | null | null | import RPi.GPIO as GPIO
from util.gpio import BUZZER
def beep_on():
GPIO.output(BUZZER, GPIO.HIGH)
def beep_off():
GPIO.output(BUZZER, GPIO.LOW)
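# Minimal usage sketch (hypothetical; assumes BUZZER is a valid output pin and
# that mode/pin setup is done by the caller elsewhere in this project):
#   GPIO.setmode(GPIO.BCM)
#   GPIO.setup(BUZZER, GPIO.OUT)
#   beep_on(); time.sleep(0.2); beep_off()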
| 13.166667 | 34 | 0.702532 | 26 | 158 | 4.192308 | 0.538462 | 0.12844 | 0.293578 | 0.366972 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.183544 | 158 | 11 | 35 | 14.363636 | 0.844961 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
a2bcd83aae351258ce52a378601eb43e91c0895c | 4,991 | py | Python | helpbot.py | isaacnoboa/balaguer_bot | 1b7d61db7ebfc1b9067e6ac1762b077ed259ecb8 | [
"MIT"
] | null | null | null | helpbot.py | isaacnoboa/balaguer_bot | 1b7d61db7ebfc1b9067e6ac1762b077ed259ecb8 | [
"MIT"
] | null | null | null | helpbot.py | isaacnoboa/balaguer_bot | 1b7d61db7ebfc1b9067e6ac1762b077ed259ecb8 | [
"MIT"
] | null | null | null | import config
import toolbox
import sqlite_handler as sql
def addme(update,context): #db'd
chat_id=update.effective_chat.id
user_id=update.effective_user.id
if not toolbox.user_is_in_group_or_admin(chat_id, user_id):
return()
check=sql.db.check_help_user(user_id)
output="*"
output+=""+toolbox.list_user(user_id, mention=False)
if check:
output+="* is already on the helper list."
# i don't think there may be duplicates if my bot works correctly,
# but using a database creates that possibility.
if len(check)>1 and config.verbose:
toolbok.notify_owner("warning: dupligate found for "+ str(check)+"\n"+
"\nchat_id: "+chat_id+
"\nuser_id: "+user_id
)
else:
output+="* added to the helper list at index "
output+=str(sql.db.add_help_user(user_id)[0])+"."
if config.verbose:
print(output)
context.bot.send_message(chat_id=chat_id, text=str(output), parse_mode="Markdown")
return()
# https://stackoverflow.com/a/6977901
# no idea why i added this comment but too lazy to remove it
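# Hedged note: these handlers follow python-telegram-bot's (update, context)
# signature; a hypothetical registration (names assumed, not part of this module):
#   from telegram.ext import CommandHandler
#   dispatcher.add_handler(CommandHandler("addme", addme))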
def removeme(update,context): #db'd
chat_id=update.effective_chat.id
user_id=update.effective_user.id
if not toolbox.user_is_in_group_or_admin(chat_id, user_id):
return()
check=sql.db.check_help_user(user_id)
output=""
output+="*"+toolbox.list_user(user_id, mention=False)
if check:
output+="* removed from the helper list."
sql.db.remove_help_user(user_id)
## debugging purposes:
#check=sql.db.check_help_user(user_id)
#output+=str(check)
else:
output+="* not found on the helper list."
context.bot.send_message(chat_id=chat_id, text=str(output), parse_mode="Markdown")
return()
def helpme(update, context): #db'd
chat_id=update.effective_chat.id
user_id=update.effective_user.id
if not toolbox.user_is_in_group_or_admin(chat_id, user_id):
return()
help_users=sql.db.get_help_users()
output="Pinging users in the helper list:\n"
output+=toolbox.list_users(help_users)
context.bot.send_message(chat_id=chat_id, text=output, parse_mode="Markdown")#, reply_to_message_id=update.message.message_id)
def listusers(update, context): #db'd
chat_id=update.effective_chat.id
user_id=update.effective_user.id
if not toolbox.user_is_admin(user_id):
return()
output = "Listing users with IDs:\n"
all_users = sql.db.get_all_users()
for i in all_users:
output = output + str(i['user_id']) + " " + i['name']+"\n"
context.bot.send_message(chat_id=chat_id, text=output)#, parse_mode="Markdown")#, reply_to_message_id=update.message.message_id)
#TODO: use the same add function for both self and manual add?
#TODO: allow list of users to be added
#maybe do that by rewriting with context.args[0] instead of stripping the string
def adduser(update, context): #db'd
chat_id=update.effective_chat.id
user_id=update.effective_user.id
if not toolbox.user_is_admin(user_id):
return()
#help_users=toolbox.load_json('help_users')
user_id=int(toolbox.remove_prefix(update.message.text,"/adduser "))
check=sql.db.check_help_user(user_id)
output="*"
output+=""+toolbox.list_user(user_id, mention=False)
if check:
output+="* is already on the helper list."
# i don't think there may be duplicates if my bot works correctly,
# but using a database creates that possibility.
if len(check)>1 and config.verbose:
toolbox.notify_owner("warning: dupligate found for "+ str(check)+"\n"+
"\nchat_id: "+chat_id+
"\nuser_id: "+user_id
)
else:
output+="* added to the helper list at index "
output+=str(sql.db.add_help_user(user_id)[0])+"."
if config.verbose:
print(output)
context.bot.send_message(chat_id=chat_id, text=str(output), parse_mode="Markdown")
return()
def removeuser(update, context): #db'd
chat_id=update.effective_chat.id
user_id=update.effective_user.id
if not toolbox.user_is_admin(user_id):
return()
user_id=int(toolbox.remove_prefix(update.message.text,"/removeuser "))
check=sql.db.check_help_user(user_id)
output=""
output+="*"+toolbox.list_user(user_id, mention=False)
if check:
output+="* removed from the helper list."
sql.db.remove_help_user(user_id)
## debugging purposes:
#check=sql.db.check_help_user(user_id)
#output+=str(check)
else:
output+="* not found on the helper list."
context.bot.send_message(chat_id=chat_id, text=str(output), parse_mode="Markdown")
return() | 36.698529 | 133 | 0.642957 | 711 | 4,991 | 4.299578 | 0.189873 | 0.07262 | 0.045797 | 0.045797 | 0.819431 | 0.813543 | 0.813543 | 0.813543 | 0.813543 | 0.784102 | 0 | 0.003173 | 0.242236 | 4,991 | 136 | 134 | 36.698529 | 0.80513 | 0.167702 | 0 | 0.755102 | 0 | 0 | 0.126972 | 0 | 0 | 0 | 0 | 0.007353 | 0 | 1 | 0.061224 | false | 0 | 0.030612 | 0 | 0.091837 | 0.020408 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a2bd93ee0f23524d02d72481d8b8a4c20e1d62b9 | 103 | py | Python | simpleflow/dispatch/__init__.py | nstott/simpleflow | 483602deb745a09b59ad6e24052dd5096c54fad2 | [
"MIT"
] | null | null | null | simpleflow/dispatch/__init__.py | nstott/simpleflow | 483602deb745a09b59ad6e24052dd5096c54fad2 | [
"MIT"
] | null | null | null | simpleflow/dispatch/__init__.py | nstott/simpleflow | 483602deb745a09b59ad6e24052dd5096c54fad2 | [
"MIT"
] | null | null | null | from . import by_module # NOQA
from . import from_task_registry # NOQA
from . import dry_run # NOQA
| 25.75 | 40 | 0.737864 | 16 | 103 | 4.5 | 0.5625 | 0.416667 | 0.388889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.203884 | 103 | 3 | 41 | 34.333333 | 0.878049 | 0.135922 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
a2e62ca5414fc8d0661d5a204d7224153ac48b75 | 3,180 | py | Python | tests/unit/flow/test_remote_orchestrate.py | yk/jina | ab66e233e74b956390f266881ff5dc4e0110d3ff | [
"Apache-2.0"
] | 1 | 2020-12-23T12:34:00.000Z | 2020-12-23T12:34:00.000Z | tests/unit/flow/test_remote_orchestrate.py | yk/jina | ab66e233e74b956390f266881ff5dc4e0110d3ff | [
"Apache-2.0"
] | null | null | null | tests/unit/flow/test_remote_orchestrate.py | yk/jina | ab66e233e74b956390f266881ff5dc4e0110d3ff | [
"Apache-2.0"
] | null | null | null | import pytest
from jina import Flow, __default_host__
from jina.helper import get_internal_ip, get_public_ip
@pytest.mark.parametrize('local_ip, on_public', [(get_internal_ip(), False),
(get_public_ip(), True)])
def test_remote_pod_local_gateway(local_ip, on_public):
# BIND socket's host must always be 0.0.0.0
remote_ip = '111.111.111.111'
f = Flow(expose_public=on_public).add(host=remote_ip)
f.build()
for k, v in f:
print(f'{v.name}\tIN: {v.address_in}\t{v.address_out}')
assert f['pod0'].host_in == __default_host__
assert f['pod0'].host_out == __default_host__
assert f['gateway'].host_in == remote_ip
assert f['gateway'].host_out == remote_ip
@pytest.mark.parametrize('local_ip, on_public', [(get_internal_ip(), False),
(get_public_ip(), True)])
def test_remote_pod_local_pod_local_gateway(local_ip, on_public):
remote_ip = '111.111.111.111'
f = Flow(expose_public=on_public).add(host=remote_ip).add()
f.build()
for k, v in f._pod_nodes.items():
print(f'{v.name}\tIN: {v.address_in}\t{v.address_out}')
assert f['pod0'].host_in == __default_host__
assert f['pod0'].host_out == local_ip
assert f['pod1'].host_in == __default_host__
assert f['pod1'].host_out == __default_host__
assert f['gateway'].host_in == __default_host__
assert f['gateway'].host_out == remote_ip
@pytest.mark.parametrize('local_ip, on_public', [(get_internal_ip(), False),
(get_public_ip(), True)])
def test_remote_pod_local_pod_remote_pod_local_gateway(local_ip, on_public):
remote1 = '111.111.111.111'
remote2 = '222.222.222.222'
f = Flow(expose_public=on_public).add(host=remote1).add().add(host=remote2)
f.build()
for k, v in f._pod_nodes.items():
print(f'{v.name}\tIN: {v.address_in}\t{v.address_out}')
assert f['pod0'].host_in == __default_host__
assert f['pod0'].host_out == local_ip
assert f['pod1'].host_in == __default_host__
assert f['pod1'].host_out == remote2
assert f['pod2'].host_in == __default_host__
assert f['pod2'].host_out == __default_host__
assert f['gateway'].host_in == remote2
assert f['gateway'].host_out == remote1
@pytest.mark.parametrize('local_ip, on_public', [(get_internal_ip(), False),
(get_public_ip(), True)])
def test_local_pod_remote_pod_remote_pod_local_gateway(local_ip, on_public):
remote1 = '111.111.111.111'
remote2 = '222.222.222.222'
f = Flow(expose_public=on_public).add().add(host=remote1).add(host=remote2)
f.build()
for k, v in f:
print(f'{v.name}\tIN: {v.address_in}\t{v.address_out}')
assert f['pod0'].host_in == __default_host__
assert f['pod0'].host_out == remote1
assert f['pod1'].host_in == __default_host__
assert f['pod1'].host_out == remote2
assert f['pod2'].host_in == __default_host__
assert f['pod2'].host_out == __default_host__
assert f['gateway'].host_in == remote2
assert f['gateway'].host_out == __default_host__
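# Common thread across these cases: BIND sides always stay on 0.0.0.0
# (__default_host__), while CONNECT sides carry the peer's routable IP, so only
# the side that dials out ever needs to know the remote address.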
| 39.75 | 79 | 0.649057 | 476 | 3,180 | 3.922269 | 0.113445 | 0.097483 | 0.127477 | 0.134976 | 0.899304 | 0.899304 | 0.893412 | 0.87895 | 0.865024 | 0.831816 | 0 | 0.042604 | 0.20283 | 3,180 | 79 | 80 | 40.253165 | 0.693886 | 0.012893 | 0 | 0.714286 | 0 | 0 | 0.151196 | 0.039553 | 0 | 0 | 0 | 0 | 0.412698 | 1 | 0.063492 | false | 0 | 0.047619 | 0 | 0.111111 | 0.063492 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
0c099b51eff54c3c3c1eda07df2c7ec2f2c33c84 | 227 | py | Python | optuna/multi_objective/_hypervolume/__init__.py | thigm85/optuna | 4680f36a470ffb9ead89abf65dcc7e7533fd789f | [
"MIT"
] | 1 | 2019-05-28T07:29:49.000Z | 2019-05-28T07:29:49.000Z | optuna/multi_objective/_hypervolume/__init__.py | nabenabe0928/optuna | aa505125de8515518fe19ba227edf7a1d3f8ebda | [
"MIT"
] | null | null | null | optuna/multi_objective/_hypervolume/__init__.py | nabenabe0928/optuna | aa505125de8515518fe19ba227edf7a1d3f8ebda | [
"MIT"
] | null | null | null | from optuna.multi_objective._hypervolume.utils import _compute_2points_volume # NOQA
from optuna.multi_objective._hypervolume.base import BaseHypervolume # NOQA
from optuna.multi_objective._hypervolume.wfg import WFG # NOQA
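# Hedged usage sketch of this internal API (compute() signature assumed from the
# package; solution set and reference point as numpy arrays):
#   import numpy as np
#   hv = WFG().compute(np.array([[0.3, 0.7], [0.6, 0.2]]), np.array([1.0, 1.0]))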
| 56.75 | 85 | 0.854626 | 29 | 227 | 6.37931 | 0.482759 | 0.162162 | 0.243243 | 0.389189 | 0.610811 | 0.421622 | 0 | 0 | 0 | 0 | 0 | 0.004854 | 0.092511 | 227 | 3 | 86 | 75.666667 | 0.893204 | 0.061674 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
0c1554505f83f55e61a0dcb735a076add4e7915f | 8,733 | py | Python | tests/google/test_transform_collect_fields.py | LudditeLabs/autodoc-tool | b4ae7e3b61907e7e9c3a1b534fce055e5860ffab | [
"Apache-2.0"
] | null | null | null | tests/google/test_transform_collect_fields.py | LudditeLabs/autodoc-tool | b4ae7e3b61907e7e9c3a1b534fce055e5860ffab | [
"Apache-2.0"
] | null | null | null | tests/google/test_transform_collect_fields.py | LudditeLabs/autodoc-tool | b4ae7e3b61907e7e9c3a1b534fce055e5860ffab | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Luddite Labs Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from autodoc.python.google.transforms.collect_fields import CollectGoogleSections
# These params will be loaded by the fixtures (assert_py_doc, parse_py_doc).
docstring_transforms = [CollectGoogleSections]
# TODO: improve me, these tests are very simple and stupid.
class TestCollectSections:
def test_notes(self, parse_py_doc):
env = parse_py_doc(
text="""
Lorem ipsum dolor sit amet, consectetur adipiscing elit...
Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
Ut enim ad minim veniam.
.. admonition:: Notes
Quis nostrud exercitation ullamco. In voluptate velit esse
cillum dolore eu fugiat nulla.
Ut enim ad minim veniam.
.. admonition:: Notes
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
"""
)
doc = env['definition'].doc_block.document
assert hasattr(doc, 'field_sections')
section = doc.field_sections.get('notes')
assert section is not None
# Notes section contains paragraph of the found admonitions.
# See Also: CollectGoogleSections.process_notes()
assert len(section) == 3
def test_note(self, parse_py_doc):
env = parse_py_doc(
text="""
Lorem ipsum dolor sit amet, consectetur adipiscing elit...
Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
Ut enim ad minim veniam.
.. note::
Quis nostrud exercitation ullamco. In voluptate velit esse
cillum dolore eu fugiat nulla.
Ut enim ad minim veniam.
.. note:: Lorem ipsum dolor sit amet, consectetur adipiscing elit.
"""
)
doc = env['definition'].doc_block.document
assert hasattr(doc, 'field_sections')
section = doc.field_sections.get('note')
assert section is not None
assert len(section) == 2
def test_examples(self, parse_py_doc):
env = parse_py_doc(
text="""
Lorem ipsum dolor sit amet, consectetur adipiscing elit...
Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
Ut enim ad minim veniam.
.. admonition:: Examples
Examples should be written in doctest format, and should
illustrate how to use the function.
>>> print([i for i in example_generator(4)])
[0, 1, 2, 3]
.. admonition:: Examples
Examples 2
>>> print([i for i in example_generator(4)])
[0, 1, 2, 3]
"""
)
doc = env['definition'].doc_block.document
assert hasattr(doc, 'field_sections')
section = doc.field_sections.get('examples')
assert section is not None
# 'Examples' section contains paragraph of the found admonitions.
# See Also: CollectGoogleSections.process_examples()
assert len(section) == 4
def test_example(self, parse_py_doc):
env = parse_py_doc(
text="""
Lorem ipsum dolor sit amet, consectetur adipiscing elit...
Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
Ut enim ad minim veniam.
.. admonition:: Example
Examples should be written in doctest format, and should
illustrate how to use the function.
>>> print([i for i in example_generator(4)])
[0, 1, 2, 3]
.. admonition:: Example
Examples 2
>>> print([i for i in example_generator(4)])
[0, 1, 2, 3]
"""
)
doc = env['definition'].doc_block.document
assert hasattr(doc, 'field_sections')
section = doc.field_sections.get('example')
assert section is not None
# 'Example' section contains paragraph of the found admonitions.
# See Also: CollectGoogleSections.process_example()
assert len(section) == 4
def test_references(self, parse_py_doc):
env = parse_py_doc(
text="""
Lorem ipsum dolor sit amet, consectetur adipiscing elit...
Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
Ut enim ad minim veniam.
.. admonition:: References
Same as examples section.
.. admonition:: References
Quis autem vel eum iure reprehenderit qui in ea voluptate
Examples should be written in doctest format, and should
illustrate how to use the function.
"""
)
doc = env['definition'].doc_block.document
assert hasattr(doc, 'field_sections')
section = doc.field_sections.get('references')
assert section is not None
assert len(section) == 2
def test_seealso(self, parse_py_doc):
env = parse_py_doc(
text="""
Lorem ipsum dolor sit amet, consectetur adipiscing elit...
Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
Ut enim ad minim veniam.
.. seealso::
Same as examples section.
Quis autem vel eum iure reprehenderit qui in ea voluptate
Examples should be written in doctest format, and should
illustrate how to use the function.
.. seealso:: Short text.
"""
)
doc = env['definition'].doc_block.document
assert hasattr(doc, 'field_sections')
section = doc.field_sections.get('seealso')
assert section is not None
assert len(section) == 2
def test_todo(self, parse_py_doc):
env = parse_py_doc(
text="""
Lorem ipsum dolor sit amet, consectetur adipiscing elit...
Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
Ut enim ad minim veniam.
.. todo::
* For module TODOs
* You have to also use ``sphinx.ext.todo`` extension
.. todo:: Do something smart.
"""
)
doc = env['definition'].doc_block.document
assert hasattr(doc, 'field_sections')
section = doc.field_sections.get('todo')
assert section is not None
assert len(section) == 2
def test_warning(self, parse_py_doc):
env = parse_py_doc(
text="""
Lorem ipsum dolor sit amet, consectetur adipiscing elit...
Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
Ut enim ad minim veniam.
.. warning::
Quis autem vel eum iure reprehenderit
Examples should be written in doctest.
.. warning:: suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur?
"""
)
doc = env['definition'].doc_block.document
assert hasattr(doc, 'field_sections')
section = doc.field_sections.get('warning')
assert section is not None
assert len(section) == 2
def test_warn(self, parse_py_doc):
env = parse_py_doc(
text="""
Lorem ipsum dolor sit amet, consectetur adipiscing elit...
:Warns: * **Some text.**
* **Quis autem vel eum iure reprehenderit**
* **Examples should be written in doctest.**
Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
Ut enim ad minim veniam.
:Warns: * **Some text.**
* **Quis autem vel eum iure reprehenderit**
* **Examples should be written in doctest.**
"""
)
doc = env['definition'].doc_block.document
assert hasattr(doc, 'field_sections')
section = doc.field_sections.get('warns')
assert section is not None
assert len(section) == 2
| 31.527076 | 84 | 0.591435 | 1,015 | 8,733 | 5 | 0.196059 | 0.019704 | 0.037438 | 0.039015 | 0.770837 | 0.757833 | 0.746798 | 0.742857 | 0.742857 | 0.735172 | 0 | 0.006698 | 0.333219 | 8,733 | 276 | 85 | 31.641304 | 0.864846 | 0.116455 | 0 | 0.744318 | 0 | 0 | 0.623083 | 0.011437 | 0 | 0 | 0 | 0.003623 | 0.153409 | 1 | 0.051136 | false | 0 | 0.011364 | 0 | 0.068182 | 0.022727 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0c5f0c4a27bd1cfa6fcf9c568337a2f557d9a895 | 38 | py | Python | exercicios/excor.py | marcellass/Exercicios-de-Python-Curso-em-Video | cdc1609a7afc59b67615d78c20c3611ee5634b5c | [
"MIT"
] | null | null | null | exercicios/excor.py | marcellass/Exercicios-de-Python-Curso-em-Video | cdc1609a7afc59b67615d78c20c3611ee5634b5c | [
"MIT"
] | null | null | null | exercicios/excor.py | marcellass/Exercicios-de-Python-Curso-em-Video | cdc1609a7afc59b67615d78c20c3611ee5634b5c | [
"MIT"
] | null | null | null | print('\033[7;33;44mOlá, Mundo\033[m') | 38 | 38 | 0.684211 | 8 | 38 | 3.25 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.297297 | 0.026316 | 38 | 1 | 38 | 38 | 0.405405 | 0 | 0 | 0 | 0 | 0 | 0.74359 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
a758a571c48af1b788cade9ef8e0330f1f193e92 | 2,777 | py | Python | tests/game/test_armor.py | gsverhoeven/ffai | 673ff00e1aac905381cdfb1228ccfcfccda97d1f | [
"Apache-2.0"
] | 3 | 2019-03-05T16:43:37.000Z | 2020-04-11T14:24:58.000Z | tests/game/test_armor.py | gsverhoeven/ffai | 673ff00e1aac905381cdfb1228ccfcfccda97d1f | [
"Apache-2.0"
] | 1 | 2019-02-24T23:04:16.000Z | 2019-02-24T23:04:16.000Z | tests/game/test_armor.py | gsverhoeven/ffai | 673ff00e1aac905381cdfb1228ccfcfccda97d1f | [
"Apache-2.0"
] | null | null | null | import pytest
from ffai.core.game import *
from unittest.mock import *
import numpy as np
@patch("ffai.core.game.Game")
def test_armour_with_mighty_blow(mock_game):
# patch the mock game proc stack
stack = Stack()
mock_game.state.stack = stack
with patch("ffai.core.util.Stack", new_callable=PropertyMock) as a:
a.return_value=stack
# fix the dice rolls - 4+5 = 9 -> not broken without MB
D6.FixedRolls.clear()
D6.FixedRolls.append(4)
D6.FixedRolls.append(5)
role = Role("Blitzer", "orc", 6,3,3,9, [], 50000, None)
player = Player("1", role, "test", 1, "orc")
blocker = Player("1", role, "test", 1, "orc", extra_skills=[Skill.MIGHTY_BLOW])
arm = Armor(mock_game, player, inflictor=blocker)
arm.step(action=None)
# mighty blow makes armour broken (10)
proc = stack.peek()
assert isinstance(proc, Injury)
assert proc.mighty_blow_used == True # indicate MB can't be used in Injury roll
@patch("ffai.core.game.Game")
def test_armour_broken_with_mighty_blow_unused(mock_game):
# patch the mock game proc stack
stack = Stack()
mock_game.state.stack = stack
with patch("ffai.core.util.Stack", new_callable=PropertyMock) as a:
a.return_value=stack
# fix the dice rolls - 5+6 = 11 -> broken even without MB
D6.FixedRolls.clear()
D6.FixedRolls.append(5)
D6.FixedRolls.append(6)
role = Role("Blitzer", "orc", 6,3,3,9, [], 50000, None)
player = Player("1", role, "test", 1, "orc")
blocker = Player("1", role, "test", 1, "orc", extra_skills=[Skill.MIGHTY_BLOW])
arm = Armor(mock_game, player, inflictor=blocker)
arm.step(action=None)
# armour broken (10)
proc = stack.peek()
assert isinstance(proc, Injury)
assert proc.mighty_blow_used == False # indicate MB can be used in Injury roll
@patch("ffai.core.game.Game")
def test_armour_no_break(mock_game):
# patch the mock game proc stack
stack = Stack()
mock_game.state.stack = stack
with patch("ffai.core.util.Stack", new_callable=PropertyMock) as a:
a.return_value=stack
# fix the dice rolls - 4+5 = 9 -> not broken without MB
D6.FixedRolls.clear()
D6.FixedRolls.append(4)
D6.FixedRolls.append(5)
role = Role("Blitzer", "orc", 6,3,3,9, [], 50000, None)
player = Player("1", role, "test", 1, "orc")
blocker = Player("1", role, "test", 1, "orc")
arm = Armor(mock_game, player, inflictor=blocker)
arm.step(action=None)
# NO mighty blow so proc is still Armor for unbroken
proc = stack.peek()
assert isinstance(proc, Armor)
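# Summary of what these three cases pin down: a roll of 9 against AV 9 breaks
# armour only via Mighty Blow's +1 (which is then flagged used for the Injury
# roll), 5+6=11 breaks outright so MB stays available for Injury, and 9 with no
# MB leaves Armor as the top proc, i.e. the armour holds.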
| 36.064935 | 87 | 0.617573 | 388 | 2,777 | 4.32732 | 0.216495 | 0.057177 | 0.046456 | 0.053603 | 0.857653 | 0.857653 | 0.837999 | 0.837999 | 0.817749 | 0.787969 | 0 | 0.03285 | 0.254591 | 2,777 | 76 | 88 | 36.539474 | 0.778261 | 0.157724 | 0 | 0.777778 | 0 | 0 | 0.083835 | 0 | 0 | 0 | 0 | 0 | 0.092593 | 1 | 0.055556 | false | 0 | 0.074074 | 0 | 0.12963 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a79f1e52350b3abcf0408d80517e584005281ccc | 133 | py | Python | eospyo/__init__.py | FACINGS/eospyo | b198841f48b217369ef772d6d9c792d7f5f79945 | [
"MIT"
] | 8 | 2021-11-07T13:51:32.000Z | 2022-03-29T17:24:52.000Z | eospyo/__init__.py | FACINGS/eospyo | b198841f48b217369ef772d6d9c792d7f5f79945 | [
"MIT"
] | 8 | 2021-11-07T16:20:39.000Z | 2021-11-24T11:15:21.000Z | eospyo/__init__.py | FACINGS/eospyo | b198841f48b217369ef772d6d9c792d7f5f79945 | [
"MIT"
] | null | null | null | from . import exc, types
from ._version import __version__
from .net import * # NOQA: F403
from .transaction import * # NOQA: F403
| 26.6 | 40 | 0.729323 | 18 | 133 | 5.111111 | 0.5 | 0.217391 | 0.304348 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.055556 | 0.18797 | 133 | 4 | 41 | 33.25 | 0.796296 | 0.157895 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
38f9aca2f9729907b3d7f41e4415f028b7006e46 | 5,795 | py | Python | userbot/modules/gitcommit.py | oxyda-fox/XBot-Remix | 3d97bea5395b223fc89a8cc6cb699cc624ccc967 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | userbot/modules/gitcommit.py | oxyda-fox/XBot-Remix | 3d97bea5395b223fc89a8cc6cb699cc624ccc967 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | userbot/modules/gitcommit.py | oxyda-fox/XBot-Remix | 3d97bea5395b223fc89a8cc6cb699cc624ccc967 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | #Encript Marshal By XVenom
#https://github.com/xvenom15
import marshal
exec(marshal.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\xa6\x00\x00\x00d\x00d\x01l\x00m\x01Z\x01\x01\x00d\x00d\x02l\x02Z\x02d\x00d\x02l\x03Z\x03d\x00d\x02l\x04Z\x04d\x00d\x02l\x05Z\x05d\x00d\x03l\x06m\x06Z\x06\x01\x00d\x00d\x04l\x07m\x08Z\x08\x01\x00d\x00d\x05l\tm\nZ\n\x01\x00d\x00d\x06l\x0bm\x0cZ\x0c\x01\x00d\x00d\x07l\rm\x0eZ\x0em\x0fZ\x0fm\x10Z\x10m\x11Z\x11\x01\x00d\x08Z\x12e\x0cd\td\nd\x0b\x8d\x02d\x0cd\r\x84\x00\x83\x01Z\x13d\x0ed\x0f\x84\x00Z\x14e\x0e\xa0\x15d\x10d\x11i\x01\xa1\x01\x01\x00d\x02S\x00)\x12\xe9\x00\x00\x00\x00)\x01\xda\x06GithubN)\x01\xda\x08datetime)\x01\xda\x06events)\x01\xda\x16DocumentAttributeVideo)\x01\xda\x08register)\x04\xda\x08CMD_HELP\xda\x13GITHUB_ACCESS_TOKEN\xda\rGIT_REPO_NAME\xda\x03bot\xfa\x0f./userbot/temp/Tz\x14^.gcommit(?: |$)(.*))\x02Z\x08outgoingZ\x07patternc\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\t\x00\x00\x00\n\x00\x00\x00\xc3\x00\x00\x00s:\x01\x00\x00|\x00j\x00r\nd\x00S\x00t\x01d\x00k\x08r&|\x00\xa0\x02d\x01\xa1\x01I\x00d\x00H\x00\x01\x00d\x00S\x00t\x03d\x00k\x08rB|\x00\xa0\x02d\x02\xa1\x01I\x00d\x00H\x00\x01\x00d\x00S\x00|\x00\xa0\x04d\x03\xa1\x01I\x00d\x00H\x00}\x01t\x05j\x06\xa0\x07t\x08\xa1\x01sht\x05\xa0\tt\x08\xa1\x01\x01\x00t\n\xa0\x0b\xa1\x00}\x02|\x00\xa0\x0c\xa1\x00I\x00d\x00H\x00}\x03z(t\r\xa0\r\xa1\x00}\x04t\x0ed\x04\x83\x01\x01\x00t\x0f\xa0\x10|\x03j\x11t\x08\xa1\x02I\x00d\x00H\x00}\x05W\x00n6\x04\x00t\x12k\nr\xdc\x01\x00}\x06\x01\x00z\x18|\x01\xa0\x02t\x13|\x06\x83\x01\xa1\x01I\x00d\x00H\x00\x01\x00W\x005\x00d\x00}\x06~\x06X\x00Y\x00nZX\x00t\n\xa0\x0b\xa1\x00}\x07|\x07|\x02\x18\x00j\x14}\x08|\x00\xa0\x15\xa1\x00I\x00d\x00H\x00\x01\x00|\x01\xa0\x02d\x05\xa0\x16|\x05|\x08\xa1\x02\xa1\x01I\x00d\x00H\x00\x01\x00|\x01\xa0\x02d\x06\xa1\x01I\x00d\x00H\x00\x01\x00t\x17|\x05|\x01\x83\x02I\x00d\x00H\x00\x01\x00d\x00S\x00)\x07Nz0`Please ADD Proper Access Token from github.com`z4`Please ADD Proper Github Repo Name of your userbot`z\x0eProcessing ...z\x1dDownloading to TEMP directoryz!Downloaded to `{}` in {} seconds.z\x18Committing to Github....)\x18Z\x08fwd_fromr\x08\x00\x00\x00\xda\x04editr\t\x00\x00\x00Z\x05reply\xda\x02os\xda\x04path\xda\x05isdir\xda\x0cGIT_TEMP_DIR\xda\x08makedirsr\x03\x00\x00\x00Z\x03nowZ\x11get_reply_message\xda\x04time\xda\x05printr\n\x00\x00\x00Z\x0edownload_mediaZ\x05media\xda\tException\xda\x03strZ\x07seconds\xda\x06delete\xda\x06format\xda\ngit_commit)\tZ\x05event\xda\x04mone\xda\x05startZ\rreply_messageZ\x06c_timeZ\x14downloaded_file_name\xda\x01e\xda\x03endZ\x02ms\xa9\x00r\x1d\x00\x00\x00\xda\x00\xda\x08download\x15\x00\x00\x00s8\x00\x00\x00\x00\x03\x06\x01\x04\x01\x08\x01\x10\x01\x04\x01\x08\x01\x10\x01\x04\x01\x10\x01\x0c\x01\n\x01\x08\x01\x0e\x01\x02\x01\x08\x01\x08\x01\x04\x01\x04\x01\x02\xfe\x0e\x04\x10\x01&\x02\x08\x01\n\x01\x0e\x01\x18\x01\x10\x01r\x1f\x00\x00\x00c\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\r\x00\x00\x00\x06\x00\x00\x00\xc3\x00\x00\x00s>\x01\x00\x00g\x00}\x02t\x00}\x03t\x01|\x03\x83\x01}\x04t\x02|\x00d\x01d\x02d\x03\x8d\x03}\x05|\x05\xa0\x03\xa1\x00}\x06|\x04\xa0\x04t\x05\xa1\x01}\x07t\x06|\x07j\x07\x83\x01\x01\x00d\x04}\x08|\x07\xa0\x08d\x05\xa1\x01}\t|\tD\x00]\x1a}\n|\x02\xa0\tt\n|\n\x83\x01\xa1\x01\x01\x00t\x06|\n\x83\x01\x01\x00qL|\x02D\x00],}\x0bd\x04}\x08|\x0bd\x06|\x00\x17\x00d\x07\x17\x00k\x02rl|\x01\xa0\x0bd\x08\xa1\x01I\x00d\x00H\x00\x02\x00\x01\x00S\x00qld\n|\x00\x17\x00}\x00|\x08d\x04k\x08\x90\x01r*|\x00\xa0\x0cd\x0bd\x05\xa1\x02}\x00t\x06|\x00\x83\x01\x01\x00zB|\x07j\r|\x00d\x0c|\x06d\rd\x0e\x8d\x04\x01\x00t\x06d\x0f\x83\x01\x01\x00t\x05}\x0c|\x0c\xa0\x0e\xa1\x00}\x0c|\x01\xa0\x0bd\x10|\x0c\x9b\x00d\x11\x9d\x03\xa1\x01I\x00d\x00H\x00\x01\x00W\x00n$\x01\x00\x01\x00\x01\x00t\x06d\x12\x83\x01\x01\x00|\x01\xa0\x0bd\x13\xa1\x01I\x00d\x00H\x00\x01\x00Y\x00n\x02X\x00n\x10|\x01\xa0\x0bd\x14\xa1\x01I\x00d\x00H\x00S\x00d\x00S\x00)\x15N\xda\x01rz\x05utf-8)\x01\xda\x08encodingTr\x1e\x00\x00\x00z\x12ContentFile(path="z\x02")z\x15`File Already Exists`Fz\x10userbot/modules/r\x0b\x00\x00\x00z\x13Uploaded New Pluginz\x0csql-extended)\x01Z\x06branchz\x0eCommitted FilezB`Commited On Your Github Repo`\n\n[Your Modules](https://github.com/z$/tree/sql-extended/userbot/modules/)z\x14Cannot Create Pluginz\x14Cannot Upload Pluginz\x13`Committed Suicide`)\x0fr\x08\x00\x00\x00r\x02\x00\x00\x00\xda\x04open\xda\x04readZ\x08get_repor\t\x00\x00\x00r\x13\x00\x00\x00\xda\x04nameZ\x0cget_contents\xda\x06appendr\x15\x00\x00\x00r\x0c\x00\x00\x00\xda\x07replace\xda\x0bcreate_file\xda\x05strip)\r\xda\tfile_namer\x19\x00\x00\x00Z\x0ccontent_listZ\x0caccess_token\xda\x01g\xda\x04fileZ\x0bcommit_dataZ\x04repor\'\x00\x00\x00\xda\x08contentsZ\x0ccontent_file\xda\x01iZ\x05ccessr\x1d\x00\x00\x00r\x1d\x00\x00\x00r\x1e\x00\x00\x00r\x18\x00\x00\x006\x00\x00\x00s>\x00\x00\x00\x00\x01\x04\x01\x04\x01\x08\x01\x0e\x01\x08\x01\n\x01\n\x01\x04\x01\n\x01\x08\x01\x0e\x01\n\x01\x08\x01\x04\x01\x10\x01\x14\x01\x02\x01\x08\x01\n\x01\x0c\x01\x08\x01\x02\x01\x12\x01\x08\x01\x04\x01\x08\x01\x1c\x01\x06\x01\x08\x01\x18\x02r\x18\x00\x00\x00Z\ngitcommitez2.gcommit \nUsage: To commit on your github repo.)\x16Z\x06githubr\x02\x00\x00\x00Z\x07aiohttpZ\x07asyncior\r\x00\x00\x00r\x12\x00\x00\x00r\x03\x00\x00\x00Z\x08telethonr\x04\x00\x00\x00Z\x11telethon.tl.typesr\x05\x00\x00\x00Z\x0euserbot.eventsr\x06\x00\x00\x00Z\x07userbotr\x07\x00\x00\x00r\x08\x00\x00\x00r\t\x00\x00\x00r\n\x00\x00\x00r\x10\x00\x00\x00r\x1f\x00\x00\x00r\x18\x00\x00\x00\xda\x06updater\x1d\x00\x00\x00r\x1d\x00\x00\x00r\x1d\x00\x00\x00r\x1e\x00\x00\x00\xda\x08<module>\x04\x00\x00\x00s"\x00\x00\x00\x0c\x01\x08\x01\x08\x01\x08\x01\x08\x01\x0c\x01\x0c\x01\x0c\x03\x0c\x02\x18\x03\x04\x02\n\x02\n\x1f\x08#\x04\x01\x02\x01\x02\xfe'))
ac1eb7c8af8fbddef058aba8b23ccdcc03df3a6c | 9,657 | py | Python | build/c4che/_cache.py | Shawnjoseph2001/TimeStylePebble | d278637348fdc3e694acc73b87dd53c737f22c9d | [
"MIT"
] | 1 | 2020-12-24T11:15:18.000Z | 2020-12-24T11:15:18.000Z | build/c4che/_cache.py | Shawnjoseph2001/TimeStylePebble | d278637348fdc3e694acc73b87dd53c737f22c9d | [
"MIT"
] | null | null | null | build/c4che/_cache.py | Shawnjoseph2001/TimeStylePebble | d278637348fdc3e694acc73b87dd53c737f22c9d | [
"MIT"
] | null | null | null | BINDIR = '/usr/local/bin'
BLOCK_MESSAGE_KEYS = []
BUILD_TYPE = 'app'
BUNDLE_NAME = 'TimeStylePebble.pbw'
DEFINES = ['RELEASE']
LIBDIR = '/usr/local/lib'
LIB_DIR = 'node_modules'
LIB_JSON = [{u'files': [u'dist.zip'], u'license': u'MIT', u'name': u'pebble-fctx', u'repository': {u'url': u'https://github.com/jrmobley/pebble-fctx.git', u'type': u'git'}, u'author': u'JR Mobley', u'version': u'1.6.2', u'dependencies': {u'pebble-utf8': u'^1.0.1'}, u'keywords': [u'pebble-package'], 'path': 'node_modules/pebble-fctx/dist', u'pebble': {u'sdkVersion': u'3', u'projectType': u'package', u'targetPlatforms': [u'aplite', u'basalt', u'chalk', u'diorite', u'emery'], u'resources': {u'media': []}}}, {u'files': [u'dist.zip'], u'license': u'MIT', u'name': u'pebble-utf8', u'repository': {u'url': u'https://github.com/jrmobley/pebble-utf8.git', u'type': u'git'}, u'author': u'JR Mobley', u'version': u'1.0.1', u'dependencies': {}, u'keywords': [u'pebble-package'], 'path': 'node_modules/pebble-utf8/dist', u'pebble': {u'sdkVersion': u'3', u'projectType': u'package', u'targetPlatforms': [u'aplite', u'basalt', u'chalk', u'diorite', u'emery'], u'resources': {u'media': []}}}]
LIB_RESOURCES_JSON = {u'pebble-fctx': [], u'pebble-utf8': []}
MESSAGE_KEYS = {u'WeatherTemperature': 10001, u'SettingBluetoothVibe': 10008, u'SettingAltClockName': 10005, u'SettingWidget2ID': 10029, u'SettingDisableWeather': 10015, u'SettingClockFontId': 10010, u'WeatherForecastLowTemp': 10004, u'SettingDisableAutobattery': 10007, u'SettingHealthUseDistance': 10016, u'SettingUseMetric': 10025, u'SettingLanguageID': 10019, u'WeatherUseNightIcon': 10026, u'SettingWidget1ID': 10028, u'SettingAltClockOffset': 10006, u'SettingColorTime': 10013, u'SettingWidget0ID': 10027, u'SettingSidebarOnLeft': 10022, u'WeatherCondition': 10000, u'SettingDecimalSep': 10014, u'SettingShowLeadingZero': 10021, u'WeatherForecastCondition': 10002, u'SettingSidebarTextColor': 10023, u'SettingColorBG': 10011, u'SettingUseLargeFonts': 10024, u'SettingHourlyVibe': 10018, u'SettingDisconnectIcon': 10009, u'SettingShowBatteryPct': 10020, u'SettingHealthUseRestfulSleep': 10017, u'SettingColorSidebar': 10012, u'WeatherForecastHighTemp': 10003}
MESSAGE_KEYS_DEFINITION = '/Users/shawn/pebble-dev/pebble-sdk-4.5-mac/TimeStylePebble/build/src/message_keys.auto.c'
MESSAGE_KEYS_HEADER = '/Users/shawn/pebble-dev/pebble-sdk-4.5-mac/TimeStylePebble/build/include/message_keys.auto.h'
MESSAGE_KEYS_JSON = '/Users/shawn/pebble-dev/pebble-sdk-4.5-mac/TimeStylePebble/build/js/message_keys.json'
NODE_PATH = '/Users/shawn/Library/Application Support/Pebble SDK/SDKs/current/node_modules'
PEBBLE_SDK_COMMON = '/Users/shawn/Library/Application Support/Pebble SDK/SDKs/current/sdk-core/pebble/common'
PEBBLE_SDK_ROOT = '/Users/shawn/Library/Application Support/Pebble SDK/SDKs/current/sdk-core/pebble'
PREFIX = '/usr/local'
PROJECT_INFO = {'appKeys': {u'WeatherTemperature': 10001, u'SettingBluetoothVibe': 10008, u'SettingAltClockName': 10005, u'SettingWidget2ID': 10029, u'SettingDisableWeather': 10015, u'SettingClockFontId': 10010, u'WeatherForecastLowTemp': 10004, u'SettingDisableAutobattery': 10007, u'SettingHealthUseDistance': 10016, u'SettingUseMetric': 10025, u'SettingLanguageID': 10019, u'WeatherUseNightIcon': 10026, u'SettingWidget1ID': 10028, u'SettingAltClockOffset': 10006, u'SettingColorTime': 10013, u'SettingWidget0ID': 10027, u'SettingSidebarOnLeft': 10022, u'WeatherCondition': 10000, u'SettingDecimalSep': 10014, u'SettingShowLeadingZero': 10021, u'WeatherForecastCondition': 10002, u'SettingSidebarTextColor': 10023, u'SettingColorBG': 10011, u'SettingUseLargeFonts': 10024, u'SettingHourlyVibe': 10018, u'SettingDisconnectIcon': 10009, u'SettingShowBatteryPct': 10020, u'SettingHealthUseRestfulSleep': 10017, u'SettingColorSidebar': 10012, u'WeatherForecastHighTemp': 10003}, u'sdkVersion': u'3', u'displayName': u'TimeStyle', u'uuid': u'4368ffa4-f0fb-4823-90be-f754b076bdaa', u'messageKeys': {u'WeatherTemperature': 10001, u'SettingBluetoothVibe': 10008, u'SettingAltClockName': 10005, u'SettingWidget2ID': 10029, u'SettingDisableWeather': 10015, u'SettingClockFontId': 10010, u'WeatherForecastLowTemp': 10004, u'SettingDisableAutobattery': 10007, u'SettingHealthUseDistance': 10016, u'SettingUseMetric': 10025, u'SettingLanguageID': 10019, u'WeatherUseNightIcon': 10026, u'SettingWidget1ID': 10028, u'SettingAltClockOffset': 10006, u'SettingColorTime': 10013, u'SettingWidget0ID': 10027, u'SettingSidebarOnLeft': 10022, u'WeatherCondition': 10000, u'SettingDecimalSep': 10014, u'SettingShowLeadingZero': 10021, u'WeatherForecastCondition': 10002, u'SettingSidebarTextColor': 10023, u'SettingColorBG': 10011, u'SettingUseLargeFonts': 10024, u'SettingHourlyVibe': 10018, u'SettingDisconnectIcon': 10009, u'SettingShowBatteryPct': 10020, u'SettingHealthUseRestfulSleep': 10017, u'SettingColorSidebar': 10012, u'WeatherForecastHighTemp': 10003}, 'companyName': u'Freakified', u'enableMultiJS': True, u'targetPlatforms': [u'basalt', u'chalk', u'diorite', u'emery'], u'capabilities': [u'location', u'health', u'configurable'], 'versionLabel': u'7.1', 'longName': u'TimeStyle', 'shortName': u'TimeStyle', u'watchapp': {u'onlyShownOnCommunication': False, u'hiddenApp': False, u'watchface': True}, u'resources': {u'media': [{u'menuIcon': True, u'type': u'bitmap', u'name': u'MENU_ICON', u'file': u'images/menuicon.png'}, {u'type': u'raw', u'name': u'LECO_REGULAR_FFONT', u'file': u'fonts/LECO1976-Regular.ffont'}, {u'type': u'raw', u'name': u'AVENIR_REGULAR_FFONT', u'file': u'fonts/AvenirNextRegular.ffont'}, {u'type': u'raw', u'name': u'AVENIR_BOLD_FFONT', u'file': u'fonts/AvenirNextDemiBold.ffont'}, {u'type': u'raw', u'name': u'WEATHER_GENERIC', u'file': u'data/WEATHER_GENERIC.pdc'}, {u'type': u'raw', u'name': u'WEATHER_THUNDERSTORM', u'file': u'data/THUNDERSTORM.pdc'}, {u'type': u'raw', u'name': u'WEATHER_RAINING_AND_SNOWING', u'file': u'data/RAINING_AND_SNOWING.pdc'}, {u'type': u'raw', u'name': u'WEATHER_PARTLY_CLOUDY_NIGHT', u'file': u'data/PARTLY_CLOUDY_NIGHT.pdc'}, {u'type': u'raw', u'name': u'WEATHER_PARTLY_CLOUDY', u'file': u'data/PARTLY_CLOUDY.pdc'}, {u'type': u'raw', u'name': u'WEATHER_LIGHT_SNOW', u'file': u'data/LIGHT_SNOW.pdc'}, {u'type': u'raw', u'name': u'WEATHER_LIGHT_RAIN', u'file': u'data/LIGHT_RAIN.pdc'}, {u'type': u'raw', u'name': u'WEATHER_HEAVY_SNOW', u'file': u'data/HEAVY_SNOW.pdc'}, {u'type': u'raw', u'name': u'WEATHER_HEAVY_RAIN', u'file': u'data/HEAVY_RAIN.pdc'}, {u'type': u'raw', u'name': u'DISCONNECTED', u'file': u'data/DISCONNECTED.pdc'}, {u'type': u'raw', u'name': u'DATE_BG', u'file': u'data/DATE_BG.pdc'}, {u'type': u'raw', u'name': u'WEATHER_CLOUDY', u'file': u'data/CLOUDY_DAY.pdc'}, {u'type': u'raw', u'name': u'WEATHER_CLEAR_NIGHT', u'file': u'data/CLEAR_NIGHT.pdc'}, {u'type': u'raw', u'name': u'WEATHER_CLEAR_DAY', u'file': u'data/CLEAR_DAY.pdc'}, {u'type': u'raw', u'name': u'BATTERY_CHARGE', u'file': u'data/BATTERY_CHARGE.pdc'}, {u'type': u'raw', u'name': u'BATTERY_BG', u'file': u'data/BATTERY_BG.pdc'}, {u'type': u'raw', u'name': u'HEALTH_SLEEP', u'file': u'data/HEALTH_SLEEP.pdc'}, {u'type': u'raw', u'name': u'HEALTH_STEPS', u'file': u'data/HEALTH_STEPS.pdc'}, {u'type': u'raw', u'name': u'HEALTH_HEART', u'file': u'data/HEALTH_HEART.pdc'}]}, 'name': u'timestyle-pebble'}
REQUESTED_PLATFORMS = [u'basalt', u'chalk', u'diorite', u'emery']
RESOURCES_JSON = [{u'menuIcon': True, u'type': u'bitmap', u'name': u'MENU_ICON', u'file': u'images/menuicon.png'}, {u'type': u'raw', u'name': u'LECO_REGULAR_FFONT', u'file': u'fonts/LECO1976-Regular.ffont'}, {u'type': u'raw', u'name': u'AVENIR_REGULAR_FFONT', u'file': u'fonts/AvenirNextRegular.ffont'}, {u'type': u'raw', u'name': u'AVENIR_BOLD_FFONT', u'file': u'fonts/AvenirNextDemiBold.ffont'}, {u'type': u'raw', u'name': u'WEATHER_GENERIC', u'file': u'data/WEATHER_GENERIC.pdc'}, {u'type': u'raw', u'name': u'WEATHER_THUNDERSTORM', u'file': u'data/THUNDERSTORM.pdc'}, {u'type': u'raw', u'name': u'WEATHER_RAINING_AND_SNOWING', u'file': u'data/RAINING_AND_SNOWING.pdc'}, {u'type': u'raw', u'name': u'WEATHER_PARTLY_CLOUDY_NIGHT', u'file': u'data/PARTLY_CLOUDY_NIGHT.pdc'}, {u'type': u'raw', u'name': u'WEATHER_PARTLY_CLOUDY', u'file': u'data/PARTLY_CLOUDY.pdc'}, {u'type': u'raw', u'name': u'WEATHER_LIGHT_SNOW', u'file': u'data/LIGHT_SNOW.pdc'}, {u'type': u'raw', u'name': u'WEATHER_LIGHT_RAIN', u'file': u'data/LIGHT_RAIN.pdc'}, {u'type': u'raw', u'name': u'WEATHER_HEAVY_SNOW', u'file': u'data/HEAVY_SNOW.pdc'}, {u'type': u'raw', u'name': u'WEATHER_HEAVY_RAIN', u'file': u'data/HEAVY_RAIN.pdc'}, {u'type': u'raw', u'name': u'DISCONNECTED', u'file': u'data/DISCONNECTED.pdc'}, {u'type': u'raw', u'name': u'DATE_BG', u'file': u'data/DATE_BG.pdc'}, {u'type': u'raw', u'name': u'WEATHER_CLOUDY', u'file': u'data/CLOUDY_DAY.pdc'}, {u'type': u'raw', u'name': u'WEATHER_CLEAR_NIGHT', u'file': u'data/CLEAR_NIGHT.pdc'}, {u'type': u'raw', u'name': u'WEATHER_CLEAR_DAY', u'file': u'data/CLEAR_DAY.pdc'}, {u'type': u'raw', u'name': u'BATTERY_CHARGE', u'file': u'data/BATTERY_CHARGE.pdc'}, {u'type': u'raw', u'name': u'BATTERY_BG', u'file': u'data/BATTERY_BG.pdc'}, {u'type': u'raw', u'name': u'HEALTH_SLEEP', u'file': u'data/HEALTH_SLEEP.pdc'}, {u'type': u'raw', u'name': u'HEALTH_STEPS', u'file': u'data/HEALTH_STEPS.pdc'}, {u'type': u'raw', u'name': u'HEALTH_HEART', u'file': u'data/HEALTH_HEART.pdc'}]
SANDBOX = False
SUPPORTED_PLATFORMS = ['basalt', 'aplite', 'diorite', 'chalk', 'emery']
TARGET_PLATFORMS = ['emery', 'diorite', 'chalk', 'basalt']
TIMESTAMP = 1608804524
USE_GROUPS = True
VERBOSE = 0
WEBPACK = '/Users/shawn/Library/Application Support/Pebble SDK/SDKs/current/node_modules/.bin/webpack'
| 344.892857 | 4,432 | 0.72362 | 1,461 | 9,657 | 4.684463 | 0.155373 | 0.035798 | 0.042081 | 0.057861 | 0.865868 | 0.860462 | 0.860462 | 0.860462 | 0.856517 | 0.839568 | 0 | 0.057966 | 0.069276 | 9,657 | 27 | 4,433 | 357.666667 | 0.703494 | 0 | 0 | 0 | 0 | 0.222222 | 0.614891 | 0.234234 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
ac69286a7d018bfa3c7eebff921f5dc7079524f0 | 13,334 | py | Python | myems-api/core/emailserver.py | xiaodao90/myems | faa10dfbb7c5537153e43d83981c7a6883c72285 | [
"MIT"
] | null | null | null | myems-api/core/emailserver.py | xiaodao90/myems | faa10dfbb7c5537153e43d83981c7a6883c72285 | [
"MIT"
] | null | null | null | myems-api/core/emailserver.py | xiaodao90/myems | faa10dfbb7c5537153e43d83981c7a6883c72285 | [
"MIT"
] | 1 | 2021-11-23T03:54:27.000Z | 2021-11-23T03:54:27.000Z | import falcon
import json
import mysql.connector
import config
import base64
import re
from core.useractivity import user_logger, access_control
class EmailServerCollection:
@staticmethod
def __init__():
""""Initializes EmailServerCollection"""
pass
@staticmethod
def on_options(req, resp):
resp.status = falcon.HTTP_200
@staticmethod
def on_get(req, resp):
access_control(req)
cnx = mysql.connector.connect(**config.myems_fdd_db)
cursor = cnx.cursor()
query = (" SELECT id, host, port, requires_authentication, user_name, password, from_addr "
" FROM tbl_email_servers ")
cursor.execute(query)
rows = cursor.fetchall()
cursor.close()
cnx.disconnect()
result = list()
if rows is not None and len(rows) > 0:
for row in rows:
meta_result = {"id": row[0],
"host": row[1],
"port": row[2],
"requires_authentication": bool(row[3]),
"user_name": row[4],
"password": str(base64.b64decode(bytearray(row[5], 'utf-8')), 'utf-8')
if row[5] is not None else None,
"from_addr": row[6]}
result.append(meta_result)
resp.text = json.dumps(result)
@staticmethod
@user_logger
def on_post(req, resp):
"""Handles POST requests"""
access_control(req)
try:
raw_json = req.stream.read().decode('utf-8')
except Exception as ex:
raise falcon.HTTPError(falcon.HTTP_400, title='API.ERROR', description=ex)
new_values = json.loads(raw_json)
if 'host' not in new_values['data'].keys() or \
not isinstance(new_values['data']['host'], str) or \
len(str.strip(new_values['data']['host'])) == 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_EMAIL_SERVER_HOST')
host = str.strip(new_values['data']['host'])
if 'port' not in new_values['data'].keys() or \
not isinstance(new_values['data']['port'], int) or \
new_values['data']['port'] <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_PORT')
port = int(new_values['data']['port'])
if 'requires_authentication' not in new_values['data'].keys() or \
not isinstance(new_values['data']['requires_authentication'], bool):
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_REQUIRES_AUTHENTICATION')
requires_authentication = new_values['data']['requires_authentication']
if requires_authentication:
if 'user_name' not in new_values['data'].keys() or \
not isinstance(new_values['data']['user_name'], str) or \
len(str.strip(new_values['data']['user_name'])) == 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_USER_NAME')
user_name = new_values['data']['user_name']
else:
user_name = None
if requires_authentication:
if 'password' not in new_values['data'].keys() or \
not isinstance(new_values['data']['password'], str) or \
len(str.strip(new_values['data']['password'])) == 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_PASSWORD')
password = base64.b64encode(bytearray(new_values['data']['password'], 'utf-8'))
else:
password = None
if 'from_addr' not in new_values['data'].keys() or \
not isinstance(new_values['data']['from_addr'], str) or \
len(str.strip(new_values['data']['from_addr'])) == 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_FROM_ADDR')
from_addr = new_values['data']['from_addr']
match = re.match(r'^[_a-z0-9-]+(\.[_a-z0-9-]+)*@[a-z0-9-]+(\.[a-z0-9-]+)*(\.[a-z]{2,4})$', from_addr)
if match is None:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_FROM_ADDR')
cnx = mysql.connector.connect(**config.myems_fdd_db)
cursor = cnx.cursor()
cursor.execute(" SELECT host "
" FROM tbl_email_servers "
" WHERE host = %s ", (host,))
if cursor.fetchone() is not None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.BAD_REQUEST',
description='API.EMAIL_SERVER_HOST_IS_ALREADY_IN_USE')
add_value = (" INSERT INTO tbl_email_servers "
" (host, port, requires_authentication, user_name, password, from_addr) "
" VALUES (%s, %s, %s, %s, %s, %s) ")
cursor.execute(add_value, (host,
port,
requires_authentication,
user_name,
password,
from_addr))
new_id = cursor.lastrowid
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_201
resp.location = '/emailservers/' + str(new_id)
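# Hypothetical request body accepted by on_post above (field names taken from
# the validation checks; values are illustrative only):
#   POST /emailservers
#   {"data": {"host": "smtp.example.com", "port": 25, "requires_authentication": true,
#             "user_name": "administrator", "password": "secret", "from_addr": "admin@example.com"}}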
class EmailServerItem:
@staticmethod
def __init__():
""""Initializes EmailServerItem"""
pass
@staticmethod
def on_options(req, resp, id_):
resp.status = falcon.HTTP_200
@staticmethod
def on_get(req, resp, id_):
access_control(req)
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, '400 Bad Request')
cnx = mysql.connector.connect(**config.myems_fdd_db)
cursor = cnx.cursor()
query = (" SELECT id, host, port, requires_authentication, user_name, password, from_addr "
" FROM tbl_email_servers "
" WHERE id = %s ")
cursor.execute(query, (id_,))
row = cursor.fetchone()
cursor.close()
cnx.disconnect()
if row is None:
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.EMAIL_SERVER_NOT_FOUND')
result = {"id": row[0],
"host": row[1],
"port": row[2],
"requires_authentication": bool(row[3]),
"user_name": row[4],
"password": str(base64.b64decode(bytearray(row[5], 'utf-8')), 'utf-8')
if row[5] is not None else None,
"from_addr": row[5]}
resp.text = json.dumps(result)
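# Illustrative response body for on_get above (values are made up):
# {"id": 1, "host": "smtp.example.com", "port": 25,
#  "requires_authentication": true, "user_name": "sender",
#  "password": "secret", "from_addr": "noreply@example.com"}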
@staticmethod
@user_logger
def on_delete(req, resp, id_):
"""Handles DELETE requests"""
access_control(req)
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_EMAIL_SERVER_ID')
cnx = mysql.connector.connect(**config.myems_fdd_db)
cursor = cnx.cursor()
cursor.execute(" SELECT host "
" FROM tbl_email_servers "
" WHERE id = %s ", (id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.EMAIL_SERVER_NOT_FOUND')
cursor.execute(" DELETE FROM tbl_email_servers WHERE id = %s ", (id_,))
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_204
@staticmethod
@user_logger
def on_put(req, resp, id_):
"""Handles PUT requests"""
access_control(req)
try:
raw_json = req.stream.read().decode('utf-8')
except Exception as ex:
raise falcon.HTTPError(falcon.HTTP_400, title='API.EXCEPTION', description=str(ex))
if not id_.isdigit() or int(id_) <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_EMAIL_SERVER_ID')
new_values = json.loads(raw_json)
if 'host' not in new_values['data'].keys() or \
not isinstance(new_values['data']['host'], str) or \
len(str.strip(new_values['data']['host'])) == 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_EMAIL_SERVER_HOST')
host = str.strip(new_values['data']['host'])
if 'port' not in new_values['data'].keys() or \
not isinstance(new_values['data']['port'], int) or \
new_values['data']['port'] <= 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_PORT')
port = int(new_values['data']['port'])
if 'requires_authentication' not in new_values['data'].keys() or \
not isinstance(new_values['data']['requires_authentication'], bool):
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_REQUIRES_AUTHENTICATION')
requires_authentication = new_values['data']['requires_authentication']
if requires_authentication:
if 'user_name' not in new_values['data'].keys() or \
not isinstance(new_values['data']['user_name'], str) or \
len(str.strip(new_values['data']['user_name'])) == 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_USER_NAME')
user_name = new_values['data']['user_name']
else:
user_name = None
if requires_authentication:
if 'password' not in new_values['data'].keys() or \
not isinstance(new_values['data']['password'], str) or \
len(str.strip(new_values['data']['password'])) == 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_PASSWORD')
password = base64.b64encode(bytearray(new_values['data']['password'], 'utf-8'))
else:
password = None
if 'from_addr' not in new_values['data'].keys() or \
not isinstance(new_values['data']['from_addr'], str) or \
len(str.strip(new_values['data']['from_addr'])) == 0:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_FROM_ADDR')
from_addr = new_values['data']['from_addr']
match = re.match(r'^[_a-z0-9-]+(\.[_a-z0-9-]+)*@[a-z0-9-]+(\.[a-z0-9-]+)*(\.[a-z]{2,4})$', from_addr)
if match is None:
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.INVALID_FROM_ADDR')
cnx = mysql.connector.connect(**config.myems_fdd_db)
cursor = cnx.cursor()
cursor.execute(" SELECT id "
" FROM tbl_email_servers "
" WHERE id = %s ",
(id_,))
if cursor.fetchone() is None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
description='API.EMAIL_SERVER_NOT_FOUND')
cursor.execute(" SELECT host "
" FROM tbl_email_servers "
" WHERE host = %s AND id != %s ", (host, id_))
if cursor.fetchone() is not None:
cursor.close()
cnx.disconnect()
raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
description='API.EMAIL_SERVER_HOST_IS_ALREADY_IN_USE')
update_row = (" UPDATE tbl_email_servers "
" SET host = %s, port = %s, requires_authentication = %s, "
" user_name = %s, password = %s, from_addr = %s "
" WHERE id = %s ")
cursor.execute(update_row, (host,
port,
requires_authentication,
user_name,
password,
from_addr,
id_,))
cnx.commit()
cursor.close()
cnx.disconnect()
resp.status = falcon.HTTP_200
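# Typical wiring for these resources in a falcon app (a sketch; the collection
# resource name is assumed, and the item route template matches the id_
# parameter used by the responders above):
# app.add_route('/emailservers', EmailServerCollection())
# app.add_route('/emailservers/{id_}', EmailServerItem())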
| 41.930818 | 109 | 0.528949 | 1,441 | 13,334 | 4.691187 | 0.101319 | 0.063905 | 0.088462 | 0.092308 | 0.872781 | 0.86213 | 0.861686 | 0.844822 | 0.840828 | 0.816864 | 0 | 0.019048 | 0.346408 | 13,334 | 317 | 110 | 42.063091 | 0.756627 | 0.00975 | 0 | 0.78626 | 0 | 0.007634 | 0.192594 | 0.064881 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034351 | false | 0.083969 | 0.026718 | 0 | 0.068702 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
ac7d2a80340ac1a3b47e235001380c39900c1274 | 312,317 | py | Python | openshift/client/apis/authorization_openshift_io_v1_api.py | TristanCacqueray/openshift-restclient-python | 7758cde7a8094acb279904f15c29e5fe3e9f7d33 | [
"Apache-2.0"
] | null | null | null | openshift/client/apis/authorization_openshift_io_v1_api.py | TristanCacqueray/openshift-restclient-python | 7758cde7a8094acb279904f15c29e5fe3e9f7d33 | [
"Apache-2.0"
] | null | null | null | openshift/client/apis/authorization_openshift_io_v1_api.py | TristanCacqueray/openshift-restclient-python | 7758cde7a8094acb279904f15c29e5fe3e9f7d33 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
OpenShift API (with Kubernetes)
OpenShift provides builds, application lifecycle, image content management, and administrative policy on top of Kubernetes. The API allows consistent management of those objects. All API operations are authenticated via an Authorization bearer token that is provided for service accounts as a generated secret (in JWT form) or via the native OAuth endpoint located at /oauth/authorize. Core infrastructure components may use client certificates that require no authentication. All API operations return a 'resourceVersion' string that represents the version of the object in the underlying storage. The standard LIST operation performs a snapshot read of the underlying objects, returning a resourceVersion representing a consistent version of the listed objects. The WATCH operation allows all updates to a set of objects after the provided resourceVersion to be observed by a client. By listing and beginning a watch from the returned resourceVersion, clients may observe a consistent view of the state of one or more objects. Note that WATCH always returns the update after the provided resourceVersion. Watch may be extended a limited time in the past - using etcd 2 the watch window is 1000 events (which on a large cluster may only be a few tens of seconds) so clients must explicitly handle the \"watch to old error\" by re-listing. Objects are divided into two rough categories - those that have a lifecycle and must reflect the state of the cluster, and those that have no state. Objects with lifecycle typically have three main sections: * 'metadata' common to all objects * a 'spec' that represents the desired state * a 'status' that represents how much of the desired state is reflected on the cluster at the current time Objects that have no state have 'metadata' but may lack a 'spec' or 'status' section. Objects are divided into those that are namespace scoped (only exist inside of a namespace) and those that are cluster scoped (exist outside of a namespace). A namespace scoped resource will be deleted when the namespace is deleted and cannot be created if the namespace has not yet been created or is in the process of deletion. Cluster scoped resources are typically only accessible to admins - resources like nodes, persistent volumes, and cluster policy. All objects have a schema that is a combination of the 'kind' and 'apiVersion' fields. This schema is additive only for any given version - no backwards incompatible changes are allowed without incrementing the apiVersion. The server will return and accept a number of standard responses that share a common schema - for instance, the common error type is 'metav1.Status' (described below) and will be returned on any error from the API server. The API is available in multiple serialization formats - the default is JSON (Accept: application/json and Content-Type: application/json) but clients may also use YAML (application/yaml) or the native Protobuf schema (application/vnd.kubernetes.protobuf). Note that the format of the WATCH API call is slightly different - for JSON it returns newline delimited objects while for Protobuf it returns length-delimited frames (4 bytes in network-order) that contain a 'versioned.Watch' Protobuf object. See the OpenShift documentation at https://docs.openshift.org for more information.
OpenAPI spec version: latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..api_client import ApiClient
class AuthorizationOpenshiftIoV1Api(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
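# fall back to a freshly constructed default client when none is injected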
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_cluster_role(self, body, **kwargs):
"""
create a ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_cluster_role(body, async=True)
>>> result = thread.get()
:param async bool
:param V1ClusterRole body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_cluster_role_with_http_info(body, **kwargs)
else:
(data) = self.create_cluster_role_with_http_info(body, **kwargs)
return data
def create_cluster_role_with_http_info(self, body, **kwargs):
"""
create a ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_cluster_role_with_http_info(body, async=True)
>>> result = thread.get()
:param async bool
:param V1ClusterRole body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
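# locals() snapshots the declared parameters plus the kwargs dict; any key
# not whitelisted in all_params is rejected below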
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_cluster_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_cluster_role`")
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/clusterroles', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRole',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
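# Illustrative usage (assumes an authenticated ApiClient and a V1ClusterRole
# model instance from the generated models package):
#   api = AuthorizationOpenshiftIoV1Api()
#   role = api.create_cluster_role(body=my_role)                # synchronous
#   thread = api.create_cluster_role(body=my_role, async=True)  # asynchronous
#   role = thread.get()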
def create_cluster_role_binding(self, body, **kwargs):
"""
create a ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_cluster_role_binding(body, async=True)
>>> result = thread.get()
:param async bool
:param V1ClusterRoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_cluster_role_binding_with_http_info(body, **kwargs)
else:
(data) = self.create_cluster_role_binding_with_http_info(body, **kwargs)
return data
def create_cluster_role_binding_with_http_info(self, body, **kwargs):
"""
create a ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_cluster_role_binding_with_http_info(body, async=True)
>>> result = thread.get()
:param async bool
:param V1ClusterRoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_cluster_role_binding`")
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/clusterrolebindings', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRoleBinding',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_local_resource_access_review_for_all_namespaces(self, body, **kwargs):
"""
create a LocalResourceAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_local_resource_access_review_for_all_namespaces(body, async=True)
>>> result = thread.get()
:param async bool
:param V1LocalResourceAccessReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1LocalResourceAccessReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_local_resource_access_review_for_all_namespaces_with_http_info(body, **kwargs)
else:
(data) = self.create_local_resource_access_review_for_all_namespaces_with_http_info(body, **kwargs)
return data
def create_local_resource_access_review_for_all_namespaces_with_http_info(self, body, **kwargs):
"""
create a LocalResourceAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_local_resource_access_review_for_all_namespaces_with_http_info(body, async=True)
>>> result = thread.get()
:param async bool
:param V1LocalResourceAccessReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1LocalResourceAccessReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_local_resource_access_review_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_local_resource_access_review_for_all_namespaces`")
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/localresourceaccessreviews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1LocalResourceAccessReview',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_local_subject_access_review_for_all_namespaces(self, body, **kwargs):
"""
create a LocalSubjectAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_local_subject_access_review_for_all_namespaces(body, async=True)
>>> result = thread.get()
:param async bool
:param V1LocalSubjectAccessReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1LocalSubjectAccessReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_local_subject_access_review_for_all_namespaces_with_http_info(body, **kwargs)
else:
(data) = self.create_local_subject_access_review_for_all_namespaces_with_http_info(body, **kwargs)
return data
def create_local_subject_access_review_for_all_namespaces_with_http_info(self, body, **kwargs):
"""
create a LocalSubjectAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_local_subject_access_review_for_all_namespaces_with_http_info(body, async=True)
>>> result = thread.get()
:param async bool
:param V1LocalSubjectAccessReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1LocalSubjectAccessReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_local_subject_access_review_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_local_subject_access_review_for_all_namespaces`")
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/localsubjectaccessreviews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1LocalSubjectAccessReview',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_namespaced_local_resource_access_review(self, namespace, body, **kwargs):
"""
create a LocalResourceAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_namespaced_local_resource_access_review(namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1LocalResourceAccessReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1LocalResourceAccessReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_namespaced_local_resource_access_review_with_http_info(namespace, body, **kwargs)
else:
(data) = self.create_namespaced_local_resource_access_review_with_http_info(namespace, body, **kwargs)
return data
def create_namespaced_local_resource_access_review_with_http_info(self, namespace, body, **kwargs):
"""
create a LocalResourceAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_namespaced_local_resource_access_review_with_http_info(namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1LocalResourceAccessReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1LocalResourceAccessReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_namespaced_local_resource_access_review" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_local_resource_access_review`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_namespaced_local_resource_access_review`")
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
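# the '{namespace}' placeholder in the resource path below is substituted
# from path_params by call_api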
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/localresourceaccessreviews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1LocalResourceAccessReview',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_namespaced_local_subject_access_review(self, namespace, body, **kwargs):
"""
create a LocalSubjectAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_namespaced_local_subject_access_review(namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1LocalSubjectAccessReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1LocalSubjectAccessReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_namespaced_local_subject_access_review_with_http_info(namespace, body, **kwargs)
else:
(data) = self.create_namespaced_local_subject_access_review_with_http_info(namespace, body, **kwargs)
return data
def create_namespaced_local_subject_access_review_with_http_info(self, namespace, body, **kwargs):
"""
create a LocalSubjectAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_namespaced_local_subject_access_review_with_http_info(namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1LocalSubjectAccessReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1LocalSubjectAccessReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_namespaced_local_subject_access_review" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_local_subject_access_review`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_namespaced_local_subject_access_review`")
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/localsubjectaccessreviews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1LocalSubjectAccessReview',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_namespaced_role(self, namespace, body, **kwargs):
"""
create a Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_namespaced_role(namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1Role body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_namespaced_role_with_http_info(namespace, body, **kwargs)
else:
(data) = self.create_namespaced_role_with_http_info(namespace, body, **kwargs)
return data
def create_namespaced_role_with_http_info(self, namespace, body, **kwargs):
"""
create a Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_namespaced_role_with_http_info(namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1Role body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_role`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_namespaced_role`")
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/roles', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Role',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_namespaced_role_binding(self, namespace, body, **kwargs):
"""
create a RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_namespaced_role_binding(namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1RoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_namespaced_role_binding_with_http_info(namespace, body, **kwargs)
else:
(data) = self.create_namespaced_role_binding_with_http_info(namespace, body, **kwargs)
return data
def create_namespaced_role_binding_with_http_info(self, namespace, body, **kwargs):
"""
create a RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_namespaced_role_binding_with_http_info(namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1RoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_role_binding`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_namespaced_role_binding`")
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/rolebindings', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBinding',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_namespaced_role_binding_restriction(self, namespace, body, **kwargs):
"""
create a RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_namespaced_role_binding_restriction(namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1RoleBindingRestriction body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBindingRestriction
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_namespaced_role_binding_restriction_with_http_info(namespace, body, **kwargs)
else:
(data) = self.create_namespaced_role_binding_restriction_with_http_info(namespace, body, **kwargs)
return data
def create_namespaced_role_binding_restriction_with_http_info(self, namespace, body, **kwargs):
"""
create a RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_namespaced_role_binding_restriction_with_http_info(namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1RoleBindingRestriction body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBindingRestriction
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_namespaced_role_binding_restriction" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_role_binding_restriction`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_namespaced_role_binding_restriction`")
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/rolebindingrestrictions', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBindingRestriction',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_namespaced_self_subject_rules_review(self, namespace, body, **kwargs):
"""
create a SelfSubjectRulesReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_namespaced_self_subject_rules_review(namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1SelfSubjectRulesReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1SelfSubjectRulesReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_namespaced_self_subject_rules_review_with_http_info(namespace, body, **kwargs)
else:
(data) = self.create_namespaced_self_subject_rules_review_with_http_info(namespace, body, **kwargs)
return data
def create_namespaced_self_subject_rules_review_with_http_info(self, namespace, body, **kwargs):
"""
create a SelfSubjectRulesReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_namespaced_self_subject_rules_review_with_http_info(namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1SelfSubjectRulesReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1SelfSubjectRulesReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_namespaced_self_subject_rules_review" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_self_subject_rules_review`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_namespaced_self_subject_rules_review`")
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/selfsubjectrulesreviews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1SelfSubjectRulesReview',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_namespaced_subject_rules_review(self, namespace, body, **kwargs):
"""
create a SubjectRulesReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_namespaced_subject_rules_review(namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1SubjectRulesReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1SubjectRulesReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_namespaced_subject_rules_review_with_http_info(namespace, body, **kwargs)
else:
(data) = self.create_namespaced_subject_rules_review_with_http_info(namespace, body, **kwargs)
return data
def create_namespaced_subject_rules_review_with_http_info(self, namespace, body, **kwargs):
"""
create a SubjectRulesReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_namespaced_subject_rules_review_with_http_info(namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1SubjectRulesReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1SubjectRulesReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_namespaced_subject_rules_review" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_subject_rules_review`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_namespaced_subject_rules_review`")
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/subjectrulesreviews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1SubjectRulesReview',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_resource_access_review(self, body, **kwargs):
"""
create a ResourceAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_resource_access_review(body, async=True)
>>> result = thread.get()
:param async bool
:param V1ResourceAccessReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ResourceAccessReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_resource_access_review_with_http_info(body, **kwargs)
else:
(data) = self.create_resource_access_review_with_http_info(body, **kwargs)
return data
def create_resource_access_review_with_http_info(self, body, **kwargs):
"""
create a ResourceAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_resource_access_review_with_http_info(body, async=True)
>>> result = thread.get()
:param async bool
:param V1ResourceAccessReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ResourceAccessReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_resource_access_review" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_resource_access_review`")
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/resourceaccessreviews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ResourceAccessReview',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_role_binding_for_all_namespaces(self, body, **kwargs):
"""
create a RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_role_binding_for_all_namespaces(body, async=True)
>>> result = thread.get()
:param async bool
:param V1RoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_role_binding_for_all_namespaces_with_http_info(body, **kwargs)
else:
(data) = self.create_role_binding_for_all_namespaces_with_http_info(body, **kwargs)
return data
def create_role_binding_for_all_namespaces_with_http_info(self, body, **kwargs):
"""
create a RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_role_binding_for_all_namespaces_with_http_info(body, async=True)
>>> result = thread.get()
:param async bool
:param V1RoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_role_binding_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_role_binding_for_all_namespaces`")
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/rolebindings', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBinding',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_role_binding_restriction_for_all_namespaces(self, body, **kwargs):
"""
create a RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_role_binding_restriction_for_all_namespaces(body, async=True)
>>> result = thread.get()
:param async bool
:param V1RoleBindingRestriction body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBindingRestriction
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_role_binding_restriction_for_all_namespaces_with_http_info(body, **kwargs)
else:
(data) = self.create_role_binding_restriction_for_all_namespaces_with_http_info(body, **kwargs)
return data
def create_role_binding_restriction_for_all_namespaces_with_http_info(self, body, **kwargs):
"""
create a RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_role_binding_restriction_for_all_namespaces_with_http_info(body, async=True)
>>> result = thread.get()
:param async bool
:param V1RoleBindingRestriction body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBindingRestriction
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_role_binding_restriction_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_role_binding_restriction_for_all_namespaces`")
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/rolebindingrestrictions', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBindingRestriction',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_role_for_all_namespaces(self, body, **kwargs):
"""
create a Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_role_for_all_namespaces(body, async=True)
>>> result = thread.get()
:param async bool
:param V1Role body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_role_for_all_namespaces_with_http_info(body, **kwargs)
else:
(data) = self.create_role_for_all_namespaces_with_http_info(body, **kwargs)
return data
def create_role_for_all_namespaces_with_http_info(self, body, **kwargs):
"""
create a Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_role_for_all_namespaces_with_http_info(body, async=True)
>>> result = thread.get()
:param async bool
:param V1Role body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_role_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_role_for_all_namespaces`")
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/roles', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Role',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
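# --- Sketch: calling the _with_http_info variant directly --------------------
# When _return_http_data_only is not forced to True (i.e. the wrapper above
# is bypassed), swagger-codegen style clients typically return a
# (data, status_code, headers) tuple from call_api. That tuple shape is an
# assumption about ApiClient.call_api, not something visible in this file:
#
#   role, status, headers = api.create_role_for_all_namespaces_with_http_info(
#       body, _return_http_data_only=False)
# -----------------------------------------------------------------------------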
def create_self_subject_rules_review_for_all_namespaces(self, body, **kwargs):
"""
create a SelfSubjectRulesReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_self_subject_rules_review_for_all_namespaces(body, async=True)
>>> result = thread.get()
:param async bool
:param V1SelfSubjectRulesReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1SelfSubjectRulesReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_self_subject_rules_review_for_all_namespaces_with_http_info(body, **kwargs)
else:
(data) = self.create_self_subject_rules_review_for_all_namespaces_with_http_info(body, **kwargs)
return data
def create_self_subject_rules_review_for_all_namespaces_with_http_info(self, body, **kwargs):
"""
create a SelfSubjectRulesReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_self_subject_rules_review_for_all_namespaces_with_http_info(body, async=True)
>>> result = thread.get()
:param async bool
:param V1SelfSubjectRulesReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1SelfSubjectRulesReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_self_subject_rules_review_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_self_subject_rules_review_for_all_namespaces`")
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/selfsubjectrulesreviews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1SelfSubjectRulesReview',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
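# --- Sketch: skipping deserialization with _preload_content ------------------
# Every method threads _preload_content through to call_api. In codegen
# clients of this vintage, _preload_content=False usually yields the raw
# urllib3 response instead of a V1SelfSubjectRulesReview model; treat that as
# an assumption about ApiClient and parse the body yourself:
#
#   import json
#   raw = api.create_self_subject_rules_review_for_all_namespaces(
#       body, _preload_content=False)
#   payload = json.loads(raw.data)           # raw JSON, no model binding
# -----------------------------------------------------------------------------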
def create_subject_access_review(self, body, **kwargs):
"""
create a SubjectAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_subject_access_review(body, async=True)
>>> result = thread.get()
:param async bool
:param V1SubjectAccessReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1SubjectAccessReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_subject_access_review_with_http_info(body, **kwargs)
else:
(data) = self.create_subject_access_review_with_http_info(body, **kwargs)
return data
def create_subject_access_review_with_http_info(self, body, **kwargs):
"""
create a SubjectAccessReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_subject_access_review_with_http_info(body, async=True)
>>> result = thread.get()
:param async bool
:param V1SubjectAccessReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1SubjectAccessReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_subject_access_review" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_subject_access_review`")
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/subjectaccessreviews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1SubjectAccessReview',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
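# --- Sketch: building a SubjectAccessReview request body ---------------------
# The endpoint above POSTs a V1SubjectAccessReview and the server echoes back
# the access decision. The constructor fields below (verb/resource/user) are
# assumptions about the V1SubjectAccessReview model, shown only to illustrate
# supplying a body:
#
#   body = V1SubjectAccessReview(verb='get', resource='pods', user='alice')
#   review = api.create_subject_access_review(body)
# -----------------------------------------------------------------------------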
def create_subject_rules_review_for_all_namespaces(self, body, **kwargs):
"""
create a SubjectRulesReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_subject_rules_review_for_all_namespaces(body, async=True)
>>> result = thread.get()
:param async bool
:param V1SubjectRulesReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1SubjectRulesReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_subject_rules_review_for_all_namespaces_with_http_info(body, **kwargs)
else:
(data) = self.create_subject_rules_review_for_all_namespaces_with_http_info(body, **kwargs)
return data
def create_subject_rules_review_for_all_namespaces_with_http_info(self, body, **kwargs):
"""
create a SubjectRulesReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_subject_rules_review_for_all_namespaces_with_http_info(body, async=True)
>>> result = thread.get()
:param async bool
:param V1SubjectRulesReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1SubjectRulesReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_subject_rules_review_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_subject_rules_review_for_all_namespaces`")
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/subjectrulesreviews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1SubjectRulesReview',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_cluster_role(self, name, body, **kwargs):
"""
delete a ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_cluster_role(name, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRole (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_cluster_role_with_http_info(name, body, **kwargs)
else:
(data) = self.delete_cluster_role_with_http_info(name, body, **kwargs)
return data
def delete_cluster_role_with_http_info(self, name, body, **kwargs):
"""
delete a ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_cluster_role_with_http_info(name, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRole (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_cluster_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_cluster_role`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_cluster_role`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if 'grace_period_seconds' in params:
query_params.append(('gracePeriodSeconds', params['grace_period_seconds']))
if 'orphan_dependents' in params:
query_params.append(('orphanDependents', params['orphan_dependents']))
if 'propagation_policy' in params:
query_params.append(('propagationPolicy', params['propagation_policy']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/clusterroles/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
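# --- Sketch: delete options become query parameters ---------------------------
# In the method above, grace_period_seconds, orphan_dependents and
# propagation_policy are serialized into the gracePeriodSeconds,
# orphanDependents and propagationPolicy query strings, while the
# V1DeleteOptions body travels as the request payload. 'my-role' is a
# placeholder:
#
#   status = api.delete_cluster_role('my-role', V1DeleteOptions(),
#                                    grace_period_seconds=0,
#                                    propagation_policy='Foreground')
# -----------------------------------------------------------------------------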
def delete_cluster_role_binding(self, name, body, **kwargs):
"""
delete a ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_cluster_role_binding(name, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRoleBinding (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_cluster_role_binding_with_http_info(name, body, **kwargs)
else:
(data) = self.delete_cluster_role_binding_with_http_info(name, body, **kwargs)
return data
def delete_cluster_role_binding_with_http_info(self, name, body, **kwargs):
"""
delete a ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_cluster_role_binding_with_http_info(name, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRoleBinding (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_cluster_role_binding`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_cluster_role_binding`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if 'grace_period_seconds' in params:
query_params.append(('gracePeriodSeconds', params['grace_period_seconds']))
if 'orphan_dependents' in params:
query_params.append(('orphanDependents', params['orphan_dependents']))
if 'propagation_policy' in params:
query_params.append(('propagationPolicy', params['propagation_policy']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/clusterrolebindings/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_collection_namespaced_role_binding_restriction(self, namespace, **kwargs):
"""
delete collection of RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_collection_namespaced_role_binding_restriction(namespace, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (possibly zero items) in the event all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can still see all possible objects. If objects are updated during a chunked list, the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non-zero, then the result is at least as fresh as the given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_collection_namespaced_role_binding_restriction_with_http_info(namespace, **kwargs)
else:
(data) = self.delete_collection_namespaced_role_binding_restriction_with_http_info(namespace, **kwargs)
return data
def delete_collection_namespaced_role_binding_restriction_with_http_info(self, namespace, **kwargs):
"""
delete collection of RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_collection_namespaced_role_binding_restriction_with_http_info(namespace, async=True)
>>> result = thread.get()
:param async bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (possibly zero items) in the event all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can still see all possible objects. If objects are updated during a chunked list, the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non-zero, then the result is at least as fresh as the given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'pretty', '_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collection_namespaced_role_binding_restriction" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_role_binding_restriction`")
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if '_continue' in params:
query_params.append(('continue', params['_continue']))
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version']))
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds']))
if 'watch' in params:
query_params.append(('watch', params['watch']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/rolebindingrestrictions', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
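# --- Sketch: delete-collection with selectors ---------------------------------
# The collection delete above accepts the same filters as a list call;
# label_selector and field_selector narrow which RoleBindingRestrictions in
# the namespace are removed. 'my-project' and the selector value are
# placeholders:
#
#   status = api.delete_collection_namespaced_role_binding_restriction(
#       'my-project', label_selector='team=platform', timeout_seconds=30)
# -----------------------------------------------------------------------------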
def delete_namespaced_role(self, name, namespace, body, **kwargs):
"""
delete a Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_namespaced_role(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_namespaced_role_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.delete_namespaced_role_with_http_info(name, namespace, body, **kwargs)
return data
def delete_namespaced_role_with_http_info(self, name, namespace, body, **kwargs):
"""
delete a Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_namespaced_role_with_http_info(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_role`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_role`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_namespaced_role`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if 'grace_period_seconds' in params:
query_params.append(('gracePeriodSeconds', params['grace_period_seconds']))
if 'orphan_dependents' in params:
query_params.append(('orphanDependents', params['orphan_dependents']))
if 'propagation_policy' in params:
query_params.append(('propagationPolicy', params['propagation_policy']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/roles/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_namespaced_role_binding(self, name, namespace, body, **kwargs):
"""
delete a RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_namespaced_role_binding(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.delete_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs)
return data
def delete_namespaced_role_binding_with_http_info(self, name, namespace, body, **kwargs):
"""
delete a RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_namespaced_role_binding_with_http_info(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_role_binding`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_role_binding`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_namespaced_role_binding`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if 'grace_period_seconds' in params:
query_params.append(('gracePeriodSeconds', params['grace_period_seconds']))
if 'orphan_dependents' in params:
query_params.append(('orphanDependents', params['orphan_dependents']))
if 'propagation_policy' in params:
query_params.append(('propagationPolicy', params['propagation_policy']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/rolebindings/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_namespaced_role_binding_restriction(self, name, namespace, body, **kwargs):
"""
delete a RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_namespaced_role_binding_restriction(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBindingRestriction (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.delete_namespaced_role_binding_restriction_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.delete_namespaced_role_binding_restriction_with_http_info(name, namespace, body, **kwargs)
return data
def delete_namespaced_role_binding_restriction_with_http_info(self, name, namespace, body, **kwargs):
"""
delete a RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_namespaced_role_binding_restriction_with_http_info(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBindingRestriction (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be a non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy; this field will be deprecated in 1.7. Should the dependent objects be orphaned? If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_namespaced_role_binding_restriction" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_role_binding_restriction`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_role_binding_restriction`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_namespaced_role_binding_restriction`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if 'grace_period_seconds' in params:
query_params.append(('gracePeriodSeconds', params['grace_period_seconds']))
if 'orphan_dependents' in params:
query_params.append(('orphanDependents', params['orphan_dependents']))
if 'propagation_policy' in params:
query_params.append(('propagationPolicy', params['propagation_policy']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/rolebindingrestrictions/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
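# A hedged usage sketch for the delete call above. The `api` instance, the
# V1DeleteOptions import path, and the name/namespace values are assumptions
# for illustration, not guaranteed by this client:
#
#     from kubernetes.client import V1DeleteOptions
#     body = V1DeleteOptions(propagation_policy='Foreground')
#     status = api.delete_namespaced_role_binding_restriction(
#         'my-restriction', 'my-project', body)
#     print(status.status)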
def get_api_resources(self, **kwargs):
"""
get available resources
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_api_resources(async=True)
>>> result = thread.get()
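A hedged synchronous sketch (the configured `api` instance is assumed, and the
`resources`/`kind` attributes are assumptions about V1APIResourceList):
>>> resource_list = api.get_api_resources()
>>> kinds = [r.kind for r in resource_list.resources]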
:param bool async: Whether to execute the request asynchronously.
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_api_resources_with_http_info(**kwargs)
else:
data = self.get_api_resources_with_http_info(**kwargs)
return data
def get_api_resources_with_http_info(self, **kwargs):
"""
get available resources
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_api_resources_with_http_info(async=True)
>>> result = thread.get()
:param bool async: Whether to execute the request asynchronously.
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_resources" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIResourceList',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_cluster_role(self, **kwargs):
"""
list objects of kind ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_cluster_role(async=True)
>>> result = thread.get()
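A hedged synchronous sketch (the `api` instance and selector value are
illustrative assumptions):
>>> cluster_roles = api.list_cluster_role(label_selector='app=example', limit=25)
>>> names = [item.metadata.name for item in cluster_roles.items]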
:param bool async: Whether to execute the request asynchronously.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (possibly zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can see all possible objects. If objects are updated during a chunked list, the version of each object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for a list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, the server simply returns what it currently has in cache, with no guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1ClusterRoleList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.list_cluster_role_with_http_info(**kwargs)
else:
data = self.list_cluster_role_with_http_info(**kwargs)
return data
def list_cluster_role_with_http_info(self, **kwargs):
"""
list objects of kind ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_cluster_role_with_http_info(async=True)
>>> result = thread.get()
:param bool async: Whether to execute the request asynchronously.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (possibly zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can see all possible objects. If objects are updated during a chunked list, the version of each object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for a list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, the server simply returns what it currently has in cache, with no guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1ClusterRoleList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['pretty', '_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_cluster_role" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if '_continue' in params:
query_params.append(('continue', params['_continue']))
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version']))
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds']))
if 'watch' in params:
query_params.append(('watch', params['watch']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/clusterroles', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRoleList',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_cluster_role_binding(self, **kwargs):
"""
list objects of kind ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_cluster_role_binding(async=True)
>>> result = thread.get()
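A hedged pagination sketch; the `api` instance is assumed, and the
`metadata._continue` attribute name is an assumption about the returned list type:
>>> page = api.list_cluster_role_binding(limit=100)
>>> bindings = list(page.items)
>>> while page.metadata._continue:
...     page = api.list_cluster_role_binding(limit=100, _continue=page.metadata._continue)
...     bindings.extend(page.items)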
:param bool async: Whether to execute the request asynchronously.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (possibly zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can see all possible objects. If objects are updated during a chunked list, the version of each object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for a list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, the server simply returns what it currently has in cache, with no guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1ClusterRoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.list_cluster_role_binding_with_http_info(**kwargs)
else:
data = self.list_cluster_role_binding_with_http_info(**kwargs)
return data
def list_cluster_role_binding_with_http_info(self, **kwargs):
"""
list objects of kind ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_cluster_role_binding_with_http_info(async=True)
>>> result = thread.get()
:param bool async: Whether to execute the request asynchronously.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (possibly zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can see all possible objects. If objects are updated during a chunked list, the version of each object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for a list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, the server simply returns what it currently has in cache, with no guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1ClusterRoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['pretty', '_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if '_continue' in params:
query_params.append(('continue', params['_continue']))
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version']))
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds']))
if 'watch' in params:
query_params.append(('watch', params['watch']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/clusterrolebindings', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRoleBindingList',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_namespaced_role(self, namespace, **kwargs):
"""
list objects of kind Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_namespaced_role(namespace, async=True)
>>> result = thread.get()
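A hedged synchronous sketch (the `api` instance and namespace are illustrative
assumptions):
>>> roles = api.list_namespaced_role('my-project')
>>> for role in roles.items:
...     print(role.metadata.name)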
:param bool async: Whether to execute the request asynchronously.
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (possibly zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can see all possible objects. If objects are updated during a chunked list, the version of each object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for a list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, the server simply returns what it currently has in cache, with no guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.list_namespaced_role_with_http_info(namespace, **kwargs)
else:
data = self.list_namespaced_role_with_http_info(namespace, **kwargs)
return data
def list_namespaced_role_with_http_info(self, namespace, **kwargs):
"""
list objects of kind Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_namespaced_role_with_http_info(namespace, async=True)
>>> result = thread.get()
:param bool async: Whether to execute the request asynchronously.
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (possibly zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can see all possible objects. If objects are updated during a chunked list, the version of each object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for a list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, the server simply returns what it currently has in cache, with no guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'pretty', '_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_role`")
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if '_continue' in params:
query_params.append(('continue', params['_continue']))
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version']))
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds']))
if 'watch' in params:
query_params.append(('watch', params['watch']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/roles', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleList',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_namespaced_role_binding(self, namespace, **kwargs):
"""
list objects of kind RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_namespaced_role_binding(namespace, async=True)
>>> result = thread.get()
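A hedged field-selector sketch (the `api` instance, namespace, and binding name
are illustrative assumptions):
>>> bindings = api.list_namespaced_role_binding(
...     'my-project', field_selector='metadata.name=my-binding')
>>> found = len(bindings.items) > 0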
:param bool async: Whether to execute the request asynchronously.
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (possibly zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can see all possible objects. If objects are updated during a chunked list, the version of each object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for a list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, the server simply returns what it currently has in cache, with no guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.list_namespaced_role_binding_with_http_info(namespace, **kwargs)
else:
data = self.list_namespaced_role_binding_with_http_info(namespace, **kwargs)
return data
def list_namespaced_role_binding_with_http_info(self, namespace, **kwargs):
"""
list objects of kind RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_namespaced_role_binding_with_http_info(namespace, async=True)
>>> result = thread.get()
:param bool async: Whether to execute the request asynchronously.
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (possibly zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can see all possible objects. If objects are updated during a chunked list, the version of each object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for a list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, the server simply returns what it currently has in cache, with no guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'pretty', '_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_role_binding`")
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if '_continue' in params:
query_params.append(('continue', params['_continue']))
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version']))
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds']))
if 'watch' in params:
query_params.append(('watch', params['watch']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/rolebindings', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBindingList',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_namespaced_role_binding_restriction(self, namespace, **kwargs):
"""
list or watch objects of kind RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_namespaced_role_binding_restriction(namespace, async=True)
>>> result = thread.get()
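A hedged bounded-list sketch (the `api` instance and namespace are illustrative
assumptions; timeout_seconds simply bounds the server-side call):
>>> restrictions = api.list_namespaced_role_binding_restriction(
...     'my-project', timeout_seconds=30)
>>> count = len(restrictions.items)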
:param bool async: Whether to execute the request asynchronously.
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (possibly zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can see all possible objects. If objects are updated during a chunked list, the version of each object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for a list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, the server simply returns what it currently has in cache, with no guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleBindingRestrictionList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.list_namespaced_role_binding_restriction_with_http_info(namespace, **kwargs)
else:
data = self.list_namespaced_role_binding_restriction_with_http_info(namespace, **kwargs)
return data
def list_namespaced_role_binding_restriction_with_http_info(self, namespace, **kwargs):
"""
list or watch objects of kind RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_namespaced_role_binding_restriction_with_http_info(namespace, async=True)
>>> result = thread.get()
:param bool async: Whether to execute the request asynchronously.
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (possibly zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can see all possible objects. If objects are updated during a chunked list, the version of each object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for a list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, the server simply returns what it currently has in cache, with no guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleBindingRestrictionList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'pretty', '_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_namespaced_role_binding_restriction" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_role_binding_restriction`")
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if '_continue' in params:
query_params.append(('continue', params['_continue']))
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version']))
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds']))
if 'watch' in params:
query_params.append(('watch', params['watch']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/rolebindingrestrictions', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBindingRestrictionList',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_role_binding_for_all_namespaces(self, **kwargs):
"""
list objects of kind RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_role_binding_for_all_namespaces(async=True)
>>> result = thread.get()
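A hedged cross-namespace sketch (the `api` instance is assumed; the metadata
attribute names are assumptions about the returned binding objects):
>>> all_bindings = api.list_role_binding_for_all_namespaces(limit=500)
>>> by_ns = {}
>>> for b in all_bindings.items:
...     by_ns.setdefault(b.metadata.namespace, []).append(b.metadata.name)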
:param bool async: Whether to execute the request asynchronously.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (possibly zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can see all possible objects. If objects are updated during a chunked list, the version of each object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for a list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, the server simply returns what it currently has in cache, with no guarantee; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.list_role_binding_for_all_namespaces_with_http_info(**kwargs)
else:
(data) = self.list_role_binding_for_all_namespaces_with_http_info(**kwargs)
return data
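# Usage sketch (not part of the generated API): the `limit`/`_continue`
# parameters documented above enable chunked listing. Assuming `api` is an
# instance of this class and that the returned list's metadata exposes the
# continue token as `_continue` (as in comparable Kubernetes clients), a
# paginated read could look like:
#
#     token = None
#     while True:
#         page = api.list_role_binding_for_all_namespaces(limit=100, _continue=token)
#         for binding in page.items:
#             handle(binding)  # hypothetical per-item handler
#         token = page.metadata._continue
#         if not token:
#             break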
def list_role_binding_for_all_namespaces_with_http_info(self, **kwargs):
"""
list objects of kind RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_role_binding_for_all_namespaces_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (down to zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can still see all possible objects. If objects are updated during a chunked list, the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, whatever is currently in cache is returned with no guarantee of freshness; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleBindingList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['pretty', '_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_role_binding_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if '_continue' in params:
query_params.append(('continue', params['_continue']))
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version']))
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds']))
if 'watch' in params:
query_params.append(('watch', params['watch']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/rolebindings', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBindingList',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_role_binding_restriction_for_all_namespaces(self, **kwargs):
"""
list or watch objects of kind RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_role_binding_restriction_for_all_namespaces(async=True)
>>> result = thread.get()
:param async bool
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (down to zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can still see all possible objects. If objects are updated during a chunked list, the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, whatever is currently in cache is returned with no guarantee of freshness; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleBindingRestrictionList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.list_role_binding_restriction_for_all_namespaces_with_http_info(**kwargs)
else:
(data) = self.list_role_binding_restriction_for_all_namespaces_with_http_info(**kwargs)
return data
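# Usage sketch (not part of the generated API): the async calling convention
# described in the docstring above returns a thread-like object whose get()
# blocks until the response is deserialized. Note that `async` as a keyword
# only parses on the Python 2-era interpreters this client targets; it is a
# reserved word from Python 3.7 onward.
#
#     thread = api.list_role_binding_restriction_for_all_namespaces(async=True)
#     restrictions = thread.get()
#     for item in restrictions.items:
#         print(item.metadata.name)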
def list_role_binding_restriction_for_all_namespaces_with_http_info(self, **kwargs):
"""
list or watch objects of kind RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_role_binding_restriction_for_all_namespaces_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (down to zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can still see all possible objects. If objects are updated during a chunked list, the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, whatever is currently in cache is returned with no guarantee of freshness; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleBindingRestrictionList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['pretty', '_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_role_binding_restriction_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if '_continue' in params:
query_params.append(('continue', params['_continue']))
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version']))
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds']))
if 'watch' in params:
query_params.append(('watch', params['watch']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/rolebindingrestrictions', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBindingRestrictionList',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_role_for_all_namespaces(self, **kwargs):
"""
list objects of kind Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_role_for_all_namespaces(async=True)
>>> result = thread.get()
:param async bool
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (down to zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can still see all possible objects. If objects are updated during a chunked list, the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, whatever is currently in cache is returned with no guarantee of freshness; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.list_role_for_all_namespaces_with_http_info(**kwargs)
else:
(data) = self.list_role_for_all_namespaces_with_http_info(**kwargs)
return data
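# Usage sketch (not part of the generated API): `label_selector` and
# `field_selector` accept standard Kubernetes selector syntax; the label and
# namespace values below are illustrative:
#
#     roles = api.list_role_for_all_namespaces(
#         label_selector='app=myapp,tier in (frontend,backend)',
#         field_selector='metadata.namespace=myproject')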
def list_role_for_all_namespaces_with_http_info(self, **kwargs):
"""
list objects of kind Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_role_for_all_namespaces_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue), and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid, whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error indicating that the client must restart its list without the continue field. This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is the maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested number of items (down to zero items) if all requested objects are filtered out, and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit; that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and it ensures that a client using limit to receive smaller chunks of a very large result can still see all possible objects. If objects are updated during a chunked list, the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: if unset, the result is returned from remote storage based on the quorum-read flag; if set to 0, whatever is currently in cache is returned with no guarantee of freshness; if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1RoleList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['pretty', '_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_role_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if '_continue' in params:
query_params.append(('continue', params['_continue']))
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version']))
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds']))
if 'watch' in params:
query_params.append(('watch', params['watch']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/roles', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleList',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
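# Usage sketch (not part of the generated API): the list_role_for_all_namespaces
# wrapper above forces _return_http_data_only=True and hands back only the
# deserialized body, whereas the *_with_http_info variant, following the usual
# swagger-codegen convention this file appears to use, returns a
# (data, status, headers) tuple by default:
#
#     data, status, headers = api.list_role_for_all_namespaces_with_http_info()
#     assert status == 200
#     print(headers['Content-Type'])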
def patch_cluster_role(self, name, body, **kwargs):
"""
partially update the specified ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.patch_cluster_role(name, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRole (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.patch_cluster_role_with_http_info(name, body, **kwargs)
else:
(data) = self.patch_cluster_role_with_http_info(name, body, **kwargs)
return data
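# Usage sketch (not part of the generated API): the PATCH endpoints negotiate
# between JSON-patch, merge-patch, and strategic-merge-patch content types
# (see the Content-Type selection in the *_with_http_info body below), so the
# `body` argument is either a list of JSON-patch operations or a partial
# object; which content type the api_client actually sends depends on its
# select_header_content_type implementation. The rule contents below are
# illustrative:
#
#     json_patch = [{'op': 'add', 'path': '/rules/-',
#                    'value': {'apiGroups': [''], 'resources': ['pods'],
#                              'verbs': ['get', 'list']}}]
#     api.patch_cluster_role('my-cluster-role', json_patch)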
def patch_cluster_role_with_http_info(self, name, body, **kwargs):
"""
partially update the specified ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.patch_cluster_role_with_http_info(name, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRole (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_cluster_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_cluster_role`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_cluster_role`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/clusterroles/{name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRole',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_cluster_role_binding(self, name, body, **kwargs):
"""
partially update the specified ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.patch_cluster_role_binding(name, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRoleBinding (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.patch_cluster_role_binding_with_http_info(name, body, **kwargs)
else:
(data) = self.patch_cluster_role_binding_with_http_info(name, body, **kwargs)
return data
def patch_cluster_role_binding_with_http_info(self, name, body, **kwargs):
"""
partially update the specified ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.patch_cluster_role_binding_with_http_info(name, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRoleBinding (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_cluster_role_binding`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_cluster_role_binding`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/clusterrolebindings/{name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRoleBinding',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_namespaced_role(self, name, namespace, body, **kwargs):
"""
partially update the specified Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.patch_namespaced_role(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.patch_namespaced_role_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.patch_namespaced_role_with_http_info(name, namespace, body, **kwargs)
return data
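# Usage sketch (not part of the generated API): namespaced variants take the
# namespace as the second positional argument; the rule index and verbs below
# are illustrative:
#
#     patch = [{'op': 'replace', 'path': '/rules/0/verbs',
#               'value': ['get', 'list', 'watch']}]
#     updated = api.patch_namespaced_role('reader', 'myproject', patch)
#     print(updated.rules)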
def patch_namespaced_role_with_http_info(self, name, namespace, body, **kwargs):
"""
partially update the specified Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.patch_namespaced_role_with_http_info(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_role`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_role`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_role`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/roles/{name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Role',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_namespaced_role_binding(self, name, namespace, body, **kwargs):
"""
partially update the specified RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.patch_namespaced_role_binding(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.patch_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.patch_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs)
return data
def patch_namespaced_role_binding_with_http_info(self, name, namespace, body, **kwargs):
"""
partially update the specified RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.patch_namespaced_role_binding_with_http_info(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_role_binding`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_role_binding`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_role_binding`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/rolebindings/{name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBinding',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_namespaced_role_binding_restriction(self, name, namespace, body, **kwargs):
"""
partially update the specified RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.patch_namespaced_role_binding_restriction(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBindingRestriction (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBindingRestriction
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.patch_namespaced_role_binding_restriction_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.patch_namespaced_role_binding_restriction_with_http_info(name, namespace, body, **kwargs)
return data
def patch_namespaced_role_binding_restriction_with_http_info(self, name, namespace, body, **kwargs):
"""
partially update the specified RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.patch_namespaced_role_binding_restriction_with_http_info(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBindingRestriction (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBindingRestriction
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_namespaced_role_binding_restriction" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_role_binding_restriction`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_role_binding_restriction`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_role_binding_restriction`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/rolebindingrestrictions/{name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBindingRestriction',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_cluster_role(self, name, **kwargs):
"""
read the specified ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.read_cluster_role(name, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRole (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.read_cluster_role_with_http_info(name, **kwargs)
else:
(data) = self.read_cluster_role_with_http_info(name, **kwargs)
return data
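# Usage sketch (not part of the generated API): swagger-codegen clients
# conventionally raise an ApiException from their rest module on non-2xx
# responses; the import path below is an assumption about this package's
# layout, not a guarantee:
#
#     from .rest import ApiException  # assumed location
#     try:
#         role = api.read_cluster_role('cluster-admin')
#         print(role.metadata.name)
#     except ApiException as e:
#         if e.status == 404:
#             print('ClusterRole not found')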
def read_cluster_role_with_http_info(self, name, **kwargs):
"""
read the specified ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.read_cluster_role_with_http_info(name, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRole (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_cluster_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_cluster_role`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/clusterroles/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRole',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_cluster_role_binding(self, name, **kwargs):
"""
read the specified ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.read_cluster_role_binding(name, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRoleBinding (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.read_cluster_role_binding_with_http_info(name, **kwargs)
else:
(data) = self.read_cluster_role_binding_with_http_info(name, **kwargs)
return data
def read_cluster_role_binding_with_http_info(self, name, **kwargs):
"""
read the specified ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.read_cluster_role_binding_with_http_info(name, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRoleBinding (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_cluster_role_binding`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/clusterrolebindings/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRoleBinding',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_namespaced_role(self, name, namespace, **kwargs):
"""
read the specified Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.read_namespaced_role(name, namespace, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.read_namespaced_role_with_http_info(name, namespace, **kwargs)
else:
(data) = self.read_namespaced_role_with_http_info(name, namespace, **kwargs)
return data
def read_namespaced_role_with_http_info(self, name, namespace, **kwargs):
"""
read the specified Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.read_namespaced_role_with_http_info(name, namespace, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_namespaced_role`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_role`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/roles/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Role',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_namespaced_role_binding(self, name, namespace, **kwargs):
"""
read the specified RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.read_namespaced_role_binding(name, namespace, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.read_namespaced_role_binding_with_http_info(name, namespace, **kwargs)
else:
(data) = self.read_namespaced_role_binding_with_http_info(name, namespace, **kwargs)
return data
def read_namespaced_role_binding_with_http_info(self, name, namespace, **kwargs):
"""
read the specified RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.read_namespaced_role_binding_with_http_info(name, namespace, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_namespaced_role_binding`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_role_binding`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/rolebindings/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBinding',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def read_namespaced_role_binding_restriction(self, name, namespace, **kwargs):
"""
read the specified RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.read_namespaced_role_binding_restriction(name, namespace, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBindingRestriction (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
:param bool export: Should this value be exported. Export strips fields that a user can not specify.
:return: V1RoleBindingRestriction
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.read_namespaced_role_binding_restriction_with_http_info(name, namespace, **kwargs)
else:
(data) = self.read_namespaced_role_binding_restriction_with_http_info(name, namespace, **kwargs)
return data
def read_namespaced_role_binding_restriction_with_http_info(self, name, namespace, **kwargs):
"""
read the specified RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.read_namespaced_role_binding_restriction_with_http_info(name, namespace, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBindingRestriction (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
:param bool export: Should this value be exported. Export strips fields that a user can not specify.
:return: V1RoleBindingRestriction
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'pretty', 'exact', 'export']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_namespaced_role_binding_restriction" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_namespaced_role_binding_restriction`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_role_binding_restriction`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if 'exact' in params:
query_params.append(('exact', params['exact']))
if 'export' in params:
query_params.append(('export', params['export']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/rolebindingrestrictions/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBindingRestriction',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
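    # Usage sketch for the extra `exact`/`export` query parameters this read
    # endpoint accepts (placeholder names, same `api` assumption as above):
    #
    #     rbr = api.read_namespaced_role_binding_restriction(
    #         'my-restriction', 'my-project', exact=False, export=True)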
def replace_cluster_role(self, name, body, **kwargs):
"""
replace the specified ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_cluster_role(name, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRole (required)
:param V1ClusterRole body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.replace_cluster_role_with_http_info(name, body, **kwargs)
else:
(data) = self.replace_cluster_role_with_http_info(name, body, **kwargs)
return data
def replace_cluster_role_with_http_info(self, name, body, **kwargs):
"""
replace the specified ClusterRole
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_cluster_role_with_http_info(name, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRole (required)
:param V1ClusterRole body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRole
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_cluster_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_cluster_role`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_cluster_role`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/clusterroles/{name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRole',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_cluster_role_binding(self, name, body, **kwargs):
"""
replace the specified ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_cluster_role_binding(name, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRoleBinding (required)
:param V1ClusterRoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.replace_cluster_role_binding_with_http_info(name, body, **kwargs)
else:
(data) = self.replace_cluster_role_binding_with_http_info(name, body, **kwargs)
return data
def replace_cluster_role_binding_with_http_info(self, name, body, **kwargs):
"""
replace the specified ClusterRoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_cluster_role_binding_with_http_info(name, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the ClusterRoleBinding (required)
:param V1ClusterRoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1ClusterRoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_cluster_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_cluster_role_binding`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_cluster_role_binding`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/clusterrolebindings/{name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1ClusterRoleBinding',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_namespaced_role(self, name, namespace, body, **kwargs):
"""
replace the specified Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_namespaced_role(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1Role body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.replace_namespaced_role_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.replace_namespaced_role_with_http_info(name, namespace, body, **kwargs)
return data
def replace_namespaced_role_with_http_info(self, name, namespace, body, **kwargs):
"""
replace the specified Role
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_namespaced_role_with_http_info(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the Role (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1Role body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_namespaced_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_role`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_role`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_role`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/roles/{name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Role',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
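    # Read-modify-write sketch for the replace_* (HTTP PUT) methods, which
    # require the full object as `body` (placeholder names and labels):
    #
    #     role = api.read_namespaced_role('edit', 'my-project')
    #     role.metadata.labels = {'team': 'platform'}
    #     role = api.replace_namespaced_role('edit', 'my-project', role)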
def replace_namespaced_role_binding(self, name, namespace, body, **kwargs):
"""
replace the specified RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_namespaced_role_binding(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1RoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.replace_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.replace_namespaced_role_binding_with_http_info(name, namespace, body, **kwargs)
return data
def replace_namespaced_role_binding_with_http_info(self, name, namespace, body, **kwargs):
"""
replace the specified RoleBinding
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_namespaced_role_binding_with_http_info(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBinding (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1RoleBinding body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBinding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_namespaced_role_binding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_role_binding`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_role_binding`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_role_binding`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/rolebindings/{name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBinding',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_namespaced_role_binding_restriction(self, name, namespace, body, **kwargs):
"""
replace the specified RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_namespaced_role_binding_restriction(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBindingRestriction (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1RoleBindingRestriction body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBindingRestriction
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.replace_namespaced_role_binding_restriction_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.replace_namespaced_role_binding_restriction_with_http_info(name, namespace, body, **kwargs)
return data
def replace_namespaced_role_binding_restriction_with_http_info(self, name, namespace, body, **kwargs):
"""
replace the specified RoleBindingRestriction
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_namespaced_role_binding_restriction_with_http_info(name, namespace, body, async=True)
>>> result = thread.get()
:param async bool
:param str name: name of the RoleBindingRestriction (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1RoleBindingRestriction body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1RoleBindingRestriction
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty']
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_namespaced_role_binding_restriction" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_role_binding_restriction`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_role_binding_restriction`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_role_binding_restriction`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken', 'Oauth2AccessToken', 'Oauth2Implicit']
return self.api_client.call_api('/apis/authorization.openshift.io/v1/namespaces/{namespace}/rolebindingrestrictions/{name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1RoleBindingRestriction',
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 58.008358 | 3,325 | 0.632108 | 36,195 | 312,317 | 5.292471 | 0.015831 | 0.020547 | 0.017838 | 0.018041 | 0.982559 | 0.98198 | 0.981656 | 0.980231 | 0.978487 | 0.977046 | 0 | 0.002028 | 0.294342 | 312,317 | 5,383 | 3,326 | 58.019134 | 0.867167 | 0.02217 | 0 | 0.849533 | 0 | 0 | 0.225022 | 0.083491 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.002075 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
3bac01d56a2c25da1cf8749c260f7741752f2f61 | 22,902 | py | Python | src/linsys.py | dongrenguang/linear-algebra | 46dcd8e09ccd05702c12af6f8e9eccb95b0fb916 | [
"MIT"
] | null | null | null | src/linsys.py | dongrenguang/linear-algebra | 46dcd8e09ccd05702c12af6f8e9eccb95b0fb916 | [
"MIT"
] | null | null | null | src/linsys.py | dongrenguang/linear-algebra | 46dcd8e09ccd05702c12af6f8e9eccb95b0fb916 | [
"MIT"
] | null | null | null | # coding=utf-8
from decimal import Decimal, getcontext
from copy import deepcopy
from vector import Vector
from plane import Plane
getcontext().prec = 30
class LinearSystem(object):
ALL_PLANES_MUST_BE_IN_SAME_DIM_MSG = 'All planes in the system should live in the same dimension'
NO_SOLUTIONS_MSG = 'No solutions'
INF_SOLUTIONS_MSG = 'Infinitely many solutions'
def __init__(self, planes):
try:
d = planes[0].dimension
for p in planes:
assert p.dimension == d
self.planes = planes
self.dimension = d
except AssertionError:
raise Exception(self.ALL_PLANES_MUST_BE_IN_SAME_DIM_MSG)
def swap_rows(self, row1, row2):
self[row1], self[row2] = self[row2], self[row1]
def multiply_coefficient_and_row(self, coefficient, row):
self[row] = self[row].time_scalar(coefficient)
def add_multiple_times_row_to_row(self, coefficient, row_to_add, row_to_be_added_to):
new_row = self[row_to_be_added_to].plus(self[row_to_add].time_scalar(coefficient))
self[row_to_be_added_to] = new_row
    # Reduce the system of equations to (upper) triangular form
def compute_triangular_form(self):
system = deepcopy(self)
l = len(system)
for i in range(0, l - 1):
indices = system.indices_of_first_nonzero_terms_in_each_row()
            # Swap in a lower row whose leading nonzero term is in pivot column i
            if indices[i] != i:
                first = -1
                for j in range(i + 1, l):
                    if indices[j] == i:
                        first = j
                        break
                if first != -1:
                    system.swap_rows(i, first)
            # Eliminate the leading term from every plane below row i
for k in range(i + 1, l):
if indices[k] == i:
times = -system[k].normal_vector[i] / system[i].normal_vector[i]
system.add_multiple_times_row_to_row(times, i, k)
return system
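    # Sketch of what compute_triangular_form does: subtract multiples of upper
    # rows so each later row starts in a later column, e.g.
    #     x + y + z = 1          x + y + z = 1
    #         y     = 2   --->       y     = 2
    #     x + y - z = 3                 -2z = 2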
    # Compute the reduced row-echelon form (RREF) of the system
def compute_rref(self):
tf = self.compute_triangular_form()
num_equations = len(tf)
pivot_indices = tf.indices_of_first_nonzero_terms_in_each_row()
for i in range(num_equations)[::-1]:
j = pivot_indices[i]
if j < 0:
continue
tf.scale_row_to_make_coefficient_equal_one(i, j)
tf.clear_coefficients_above(i, j)
return tf
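    # RREF sketch (matches commented test case 1 below): each pivot is scaled
    # to 1 and cleared from the rows above it, so
    #     x + y + z = 1              x = -1
    #         y + z = 2    --->      y + z = 2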
        ''' My own earlier approach, kept for reference:
tf = self.compute_triangular_form()
l = len(tf)
dimension = self.dimension
i = l - 1
min = l
if dimension < min:
min = dimension
while i >= 0:
            # Normalize the pivot coefficient to 1
z = tf[i].normal_vector[indices[i]]
if not MyDecimal(z).is_near_zero():
times = Decimal('1.0') / z
tf[i] = tf[i].time_scalar(times)
            # Clear the remaining coefficients to the right of the pivot
if i < dimension - 1:
for j in range(i + 1, min):
b = tf[i].normal_vector[j]
a = tf[j].normal_vector[j]
if not MyDecimal(a).is_near_zero():
times2 = - b / a
tf.add_multiple_times_row_to_row(times2, j, i)
i -= 1
return tf
'''
def scale_row_to_make_coefficient_equal_one(self, row, col):
n = self[row].normal_vector
beta = Decimal('1.0') / n[col]
self.multiply_coefficient_and_row(beta, row)
def clear_coefficients_above(self, row, col):
for k in range(row)[::-1]:
n = self[k].normal_vector
alpha = -n[col]
self.add_multiple_times_row_to_row(alpha, row, k)
def do_gaussian_elimination_and_extract_solution(self):
rref = self.compute_rref()
rref.raise_exception_if_contradictory_equation()
rref.raise_exception_if_too_few_pivots()
num_variables = self.dimension
solution_coordinates = [rref.planes[i].constant_term for i in range(num_variables)]
return Vector(solution_coordinates)
def raise_exception_if_contradictory_equation(self):
for p in self.planes:
try:
p.first_nonzero_index(p.normal_vector)
except Exception as e:
if str(e) == 'No nonzero elements found':
constant_term = MyDecimal(p.constant_term)
if not constant_term.is_near_zero():
raise Exception(self.NO_SOLUTIONS_MSG)
else:
raise e
def raise_exception_if_too_few_pivots(self):
pivot_indices = self.indices_of_first_nonzero_terms_in_each_row()
num_pivots = sum([1 if index >= 0 else 0 for index in pivot_indices])
num_variables = self.dimension
if num_pivots < num_variables:
raise Exception(self.INF_SOLUTIONS_MSG)
def indices_of_first_nonzero_terms_in_each_row(self):
num_equations = len(self)
num_variables = self.dimension
indices = [-1] * num_equations
for i,p in enumerate(self.planes):
try:
indices[i] = p.first_nonzero_index(p.normal_vector)
except Exception as e:
if str(e) == Plane.NO_NONZERO_ELTS_FOUND_MSG:
continue
else:
raise e
return indices
def compute_solution(self):
try:
return self.do_gaussian_elimination_and_parametrize_solution()
except Exception as e:
if str(e) == self.NO_SOLUTIONS_MSG:
return str(e)
else:
raise e
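    # compute_solution() yields a Parametrization: an empty direction_vectors
    # list means a unique solution (just the basepoint), a non-empty list means
    # infinitely many solutions, and a contradictory system returns the
    # NO_SOLUTIONS_MSG string instead.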
def do_gaussian_elimination_and_parametrize_solution(self):
rref = self.compute_rref()
rref.raise_exception_if_contradictory_equation()
direction_vectors = rref.extract_direction_vectors_for_parametrization()
basepoint = rref.extract_basepoint_vectors_for_parametrization()
return Parametrization(basepoint, direction_vectors)
def extract_direction_vectors_for_parametrization(self):
num_variables = self.dimension
pivot_indices = self.indices_of_first_nonzero_terms_in_each_row()
free_variable_indices = set(range(num_variables)) - set(pivot_indices)
direction_vectors = []
for free_var in free_variable_indices:
vector_coords = [0] * num_variables
vector_coords[free_var] = 1
for i, p in enumerate(self.planes):
pivot_var = pivot_indices[i]
if pivot_var < 0:
break
                # the coefficient lands in the slot of row i's pivot variable
                vector_coords[pivot_var] = -p.normal_vector[free_var]
direction_vectors.append(Vector(vector_coords))
return direction_vectors
def extract_basepoint_vectors_for_parametrization(self):
num_variables = self.dimension
pivot_indices = self.indices_of_first_nonzero_terms_in_each_row()
basepoint_coords = [0] * num_variables
for i, p in enumerate(self.planes):
pivot_var = pivot_indices[i]
if pivot_var < 0:
break
basepoint_coords[pivot_var] = p.constant_term
return Vector(basepoint_coords)
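    # Parametrization math behind the two extractors above: an RREF row
    #     x_p + sum_f c_f * x_f = k      (x_p pivot, x_f free variables)
    # is read as  x_p = k - sum_f c_f * t_f,  so the basepoint collects the
    # constants k and each free variable x_f contributes one direction vector
    # with 1 in its own slot and -c_f in the pivot slots.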
def __len__(self):
return len(self.planes)
def __getitem__(self, i):
return self.planes[i]
def __setitem__(self, i, x):
try:
assert x.dimension == self.dimension
self.planes[i] = x
except AssertionError:
raise Exception(self.ALL_PLANES_MUST_BE_IN_SAME_DIM_MSG)
def __str__(self):
ret = 'Linear System:\n'
temp = ['Equation {}: {}'.format(i+1,p) for i,p in enumerate(self.planes)]
ret += '\n'.join(temp)
return ret
class MyDecimal(Decimal):
def is_near_zero(self, eps=1e-10):
return abs(self) < eps
class Parametrization(object):
BASEPT_AND_DIR_VECTORS_MUST_BE_IN_SAME_DIM_MSG = 'The basepoint and direction vectors should all live in the same dimension'
def __init__(self, basepoint, direction_vectors):
self.basepoint = basepoint
self.direction_vectors = direction_vectors
self.dimension = self.basepoint.dimension
try:
for v in direction_vectors:
assert v.dimension == self.dimension
except AssertionError:
            raise Exception(self.BASEPT_AND_DIR_VECTORS_MUST_BE_IN_SAME_DIM_MSG)
def __str__(self):
result = 'basepoint:\n' + str(self.basepoint) + '\n\n'
result += 'direction_vectors:\n'
for v in self.direction_vectors:
result += str(v) + '\n'
return result
# p0 = Plane(normal_vector=Vector(['1','1','1']), constant_term='1')
# p1 = Plane(normal_vector=Vector(['0','1','0']), constant_term='2')
# p2 = Plane(normal_vector=Vector(['1','1','-1']), constant_term='3')
# p3 = Plane(normal_vector=Vector(['1','0','-2']), constant_term='2')
#
# s = LinearSystem([p0,p1,p2,p3])
# print s.indices_of_first_nonzero_terms_in_each_row()
# print '{},{},{},{}'.format(s[0],s[1],s[2],s[3])
# print len(s)
# print s
# s.swap_rows(0, 3)
# s.multiply_coefficient_and_row(2, 3)
# s.add_multiple_times_row_to_row(1, 3, 2)
# print s
# system = s.compute_triangular_form()
# print system
# p1 = Plane(normal_vector=Vector(['1','1','1']), constant_term='1')
# p2 = Plane(normal_vector=Vector(['0','1','1']), constant_term='2')
# s = LinearSystem([p1,p2])
# r = s.compute_rref()
# if not (r[0] == Plane(normal_vector=Vector(['1','0','0']), constant_term='-1') and
# r[1] == p2):
# print 'test case 1 failed'
# else:
# print 'Pass case 1'
#
# p1 = Plane(normal_vector=Vector(['1','1','1']), constant_term='1')
# p2 = Plane(normal_vector=Vector(['1','1','1']), constant_term='2')
# s = LinearSystem([p1,p2])
# r = s.compute_rref()
# if not (r[0] == p1 and
# r[1] == Plane(constant_term='1')):
# print 'test case 2 failed'
# else:
# print 'Pass case 2'
#
# p1 = Plane(normal_vector=Vector(['1','1','1']), constant_term='1')
# p2 = Plane(normal_vector=Vector(['0','1','0']), constant_term='2')
# p3 = Plane(normal_vector=Vector(['1','1','-1']), constant_term='3')
# p4 = Plane(normal_vector=Vector(['1','0','-2']), constant_term='2')
# s = LinearSystem([p1,p2,p3,p4])
# r = s.compute_rref()
# if not (r[0] == Plane(normal_vector=Vector(['1','0','0']), constant_term='0') and
# r[1] == p2 and
# r[2] == Plane(normal_vector=Vector(['0','0','-2']), constant_term='2') and
# r[3] == Plane()):
# print 'test case 3 failed'
# else:
# print 'Pass case 3'
#
#
# p1 = Plane(normal_vector=Vector(['0','1','1']), constant_term='1')
# p2 = Plane(normal_vector=Vector(['1','-1','1']), constant_term='2')
# p3 = Plane(normal_vector=Vector(['1','2','-5']), constant_term='3')
# s = LinearSystem([p1,p2,p3])
# r = s.compute_rref()
# if not (r[0] == Plane(normal_vector=Vector(['1','0','0']), constant_term=Decimal('23')/Decimal('9')) and
# r[1] == Plane(normal_vector=Vector(['0','1','0']), constant_term=Decimal('7')/Decimal('9')) and
# r[2] == Plane(normal_vector=Vector(['0','0','1']), constant_term=Decimal('2')/Decimal('9'))):
# print 'test case 4 failed'
# else:
# print 'Pass case 4'
# p0 = Plane(normal_vector=Vector(['5.862',' 1.178','-10.366']), constant_term='-8.15')
# p1 = Plane(normal_vector=Vector(['-2.931','-0.589','5.183']), constant_term='-4.075')
# p0 = Plane(normal_vector=Vector(['8.631','5.112','-1.816']), constant_term='-5.113')
# p1 = Plane(normal_vector=Vector(['4.315','11.132','-5.27']), constant_term='-6.775')
# p2 = Plane(normal_vector=Vector(['-2.158','3.01','-1.727']), constant_term='-0.831')
# p0 = Plane(normal_vector=Vector(['5.262','2.739','-9.878']), constant_term='-3.441')
# p1 = Plane(normal_vector=Vector(['5.111','6.358','7.638']), constant_term='-2.152')
# p2 = Plane(normal_vector=Vector(['2.016','-9.924','-1.367']), constant_term='-9.278')
# p3 = Plane(normal_vector=Vector(['2.167','-13.593','-18.883']), constant_term='-10.567')
p0 = Plane(Vector(['0.786', '0.786', '0.588']), '-0.714')
p1 = Plane(Vector(['-0.138', '-0.138', '0.244']), '0.319')
# p0 = Plane(Vector(['8.631', '5.112', '-1.816']), '-5.113')
# p1 = Plane(Vector(['4.315', '11.132', '-5.27']), '-6.775')
# p2 = Plane(Vector(['-2.158', '3.01', '-1.727']), '-0.831')
# p0 = Plane(Vector(['0.935', '1.76', '-9.365']), '-9.955')
# p1 = Plane(Vector(['0.187', '0.352', '-1.873']), '-1.991')
# p2 = Plane(Vector(['0.374', '0.704', '-3.746']), '-3.982')
# p3 = Plane(Vector(['-0.561', '-1.056', '5.619']), '5.973')
s = LinearSystem([p0, p1])
r = s.compute_solution()
print(r)
| 32.53125 | 128 | 0.589512 | 3,168 | 22,902 | 4.025884 | 0.07923 | 0.07433 | 0.079975 | 0.108201 | 0.93994 | 0.930767 | 0.92222 | 0.91681 | 0.91681 | 0.916026 | 0 | 0.055127 | 0.257838 | 22,902 | 703 | 129 | 32.577525 | 0.69524 | 0.310628 | 0 | 0.887821 | 0 | 0 | 0.043366 | 0 | 0 | 0 | 0 | 0 | 0.038462 | 0 | null | null | 0 | 0.025641 | null | null | 0.00641 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
ce02caa88f365aef54016af21ed51fe63c6667b6 | 5,361 | py | Python | mysite/ct/migrations/0019_auto_20160614_0335.py | raccoongang/socraticqs2 | 06201005136ee139846f857dbb2f518736e441de | [
"Apache-2.0"
] | 3 | 2015-11-20T07:33:28.000Z | 2017-01-15T23:33:50.000Z | mysite/ct/migrations/0019_auto_20160614_0335.py | raccoongang/socraticqs2 | 06201005136ee139846f857dbb2f518736e441de | [
"Apache-2.0"
] | 28 | 2015-07-14T11:33:24.000Z | 2017-11-17T15:21:22.000Z | mysite/ct/migrations/0019_auto_20160614_0335.py | raccoongang/socraticqs2 | 06201005136ee139846f857dbb2f518736e441de | [
"Apache-2.0"
] | 4 | 2015-04-29T09:04:59.000Z | 2017-07-19T14:11:16.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('ct', '0018_unit_small_img_url'),
]
operations = [
migrations.AlterField(
model_name='concept',
name='approvedBy',
field=models.ForeignKey(related_name='approvedConcepts', blank=True, to=settings.AUTH_USER_MODEL, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='conceptgraph',
name='approvedBy',
field=models.ForeignKey(related_name='approvedConceptEdges', blank=True, to=settings.AUTH_USER_MODEL, null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='course',
name='enrollCode',
field=models.CharField(max_length=64, null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='course',
name='lockout',
field=models.CharField(max_length=200, null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='courseunit',
name='releaseTime',
field=models.DateTimeField(null=True, verbose_name=b'time released', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='lesson',
name='changeLog',
field=models.TextField(null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='lesson',
name='commitTime',
field=models.DateTimeField(null=True, verbose_name=b'time committed', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='lesson',
name='concept',
field=models.ForeignKey(blank=True, to='ct.Concept', null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='lesson',
name='data',
field=models.TextField(null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='lesson',
name='mergeParent',
field=models.ForeignKey(related_name='mergeChildren', blank=True, to='ct.Lesson', null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='lesson',
name='parent',
field=models.ForeignKey(related_name='children', blank=True, to='ct.Lesson', null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='lesson',
name='sourceDB',
field=models.CharField(max_length=32, null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='lesson',
name='sourceID',
field=models.CharField(max_length=100, null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='lesson',
name='text',
field=models.TextField(null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='lesson',
name='treeID',
field=models.IntegerField(null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='lesson',
name='url',
field=models.CharField(max_length=256, null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='response',
name='activity',
field=models.ForeignKey(blank=True, to='fsm.ActivityLog', null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='response',
name='parent',
field=models.ForeignKey(blank=True, to='ct.Response', null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='response',
name='title',
field=models.CharField(max_length=200, null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='studenterror',
name='activity',
field=models.ForeignKey(blank=True, to='fsm.ActivityLog', null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='unitlesson',
name='lesson',
field=models.ForeignKey(blank=True, to='ct.Lesson', null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='unitlesson',
name='order',
field=models.IntegerField(null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='unitlesson',
name='parent',
field=models.ForeignKey(blank=True, to='ct.UnitLesson', null=True),
preserve_default=True,
),
]
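# Applying this migration uses Django's standard command; a typical invocation
# (app label 'ct' and migration number taken from this file's name) would be:
#
#     python manage.py migrate ct 0019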
| 34.811688 | 125 | 0.564633 | 496 | 5,361 | 5.959677 | 0.169355 | 0.155616 | 0.19452 | 0.225643 | 0.838972 | 0.766915 | 0.766915 | 0.717185 | 0.717185 | 0.657645 | 0 | 0.005761 | 0.32009 | 5,361 | 153 | 126 | 35.039216 | 0.805213 | 0.003917 | 0 | 0.707483 | 0 | 0 | 0.101349 | 0.004309 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.020408 | 0 | 0.040816 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
ce33e3e389f8bf8ba77832d23aa354896355dfa4 | 103 | py | Python | newsdb_test.py | atpavan/log-analysis | 6e4b26006feb822373c4569ea81e1a8abfdba4bd | [
"MIT"
] | null | null | null | newsdb_test.py | atpavan/log-analysis | 6e4b26006feb822373c4569ea81e1a8abfdba4bd | [
"MIT"
] | null | null | null | newsdb_test.py | atpavan/log-analysis | 6e4b26006feb822373c4569ea81e1a8abfdba4bd | [
"MIT"
] | null | null | null | from newsdb import get_tables, get_column_names
tables = get_tables()
get_column_names(tables= tables) | 25.75 | 47 | 0.834951 | 16 | 103 | 5 | 0.4375 | 0.3375 | 0.3 | 0.45 | 0.725 | 0.725 | 0 | 0 | 0 | 0 | 0 | 0 | 0.097087 | 103 | 4 | 48 | 25.75 | 0.860215 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
02054372a4cca80f2fcb4d873cb26a51d4805c6f | 43 | py | Python | examples/str.expandtabs/ex1.py | mcorne/python-by-example | 15339c0909c84b51075587a6a66391100971c033 | [
"MIT"
] | null | null | null | examples/str.expandtabs/ex1.py | mcorne/python-by-example | 15339c0909c84b51075587a6a66391100971c033 | [
"MIT"
] | null | null | null | examples/str.expandtabs/ex1.py | mcorne/python-by-example | 15339c0909c84b51075587a6a66391100971c033 | [
"MIT"
] | null | null | null | print('01\t012\t0123\t01234'.expandtabs())
| 21.5 | 42 | 0.744186 | 6 | 43 | 5.333333 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 0.023256 | 43 | 1 | 43 | 43 | 0.428571 | 0 | 0 | 0 | 0 | 0 | 0.465116 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
021d71c30bc0f1339c19e9f0ab1c996e32a55de9 | 12,568 | py | Python | _main_.py | xPrithvi/Quantum-Mechanics-Particle-In-A-Box-3D- | 2bb4f53d3947d260dc0d693bb12a4b4abf7e7b63 | [
"MIT"
] | 3 | 2020-06-27T11:59:18.000Z | 2020-12-09T06:15:44.000Z | _main_.py | xPrithvi/Quantum-Mechanics-Particle-In-A-Box-3D- | 2bb4f53d3947d260dc0d693bb12a4b4abf7e7b63 | [
"MIT"
] | null | null | null | _main_.py | xPrithvi/Quantum-Mechanics-Particle-In-A-Box-3D- | 2bb4f53d3947d260dc0d693bb12a4b4abf7e7b63 | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
import matplotlib.animation as animation
import matplotlib.patches as mpatches
from mpl_toolkits.mplot3d import axes3d
from matplotlib import style
import numpy as np
import math
import time
#plt.style.use('dark_background')
class ParticleInABox():
class OneDimensional():
def __init__(self, length_x, quantum_number, time_dependence):
self.length_x = length_x
self.quantum_number = quantum_number
self.time_dependence = time_dependence
def wavefunction(self):
if self.time_dependence == True:
figure = plt.figure()
figure.show()
"""Generate time intervals."""
t = []
for i in range(0, 1000):
u = i*100
t.append(u)
for time_interval in t:
plt.clf()
axis = figure.add_subplot(111)
axis.autoscale(False)
plt.xlim(0, self.length_x)
plt.ylim(-1.5, 1.5)
x = []
for i in range((self.length_x)*100):
u = i/100
x.append(u)
real_y = []
imaginary_y = []
for i in x:
                        # Phase E_n*t/hbar with E_n = n^2 h^2 / (8 m L^2),
                        # i.e. pi * n^2 * h * t / (4 m L^2); note L is squared.
                        real_output = math.cos((pow(self.quantum_number, 2)*6.626*pow(10, -34)*math.pi*time_interval)/(4*9.109*pow(10, -31)*pow(self.length_x, 2)))*math.sqrt((2)/(self.length_x))*math.sin((math.pi*self.quantum_number*i)/(self.length_x))
                        imaginary_output = -1*math.sin((pow(self.quantum_number, 2)*6.626*pow(10, -34)*math.pi*time_interval)/(4*9.109*pow(10, -31)*pow(self.length_x, 2)))*math.sqrt((2)/(self.length_x))*math.sin((math.pi*self.quantum_number*i)/(self.length_x))
real_y.append(real_output)
imaginary_y.append(imaginary_output)
axis.plot(x, real_y, color = "blue", label = r'$Re[\psi(x)$]')
axis.plot(x, imaginary_y, color = "red", label = r'$Im[\psi(x)]$')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc='lower left', ncol=2, mode="expand", borderaxespad=0.)
axis.set_xlabel(r'$x$')
figure.canvas.draw()
time.sleep(0.01)
else:
x = []
for i in range((self.length_x)*100):
u = i/100
x.append(u)
y = []
for i in x:
output = math.sqrt((2)/(self.length_x))*math.sin((math.pi*self.quantum_number*i)/(self.length_x))
y.append(output)
plt.plot(x, y)
plt.show()
def PDF(self):
if self.time_dependence == True:
figure = plt.figure()
figure.show()
"""Generate time intervals."""
t = []
for i in range(0, 1000):
u = i*100
t.append(u)
for time_interval in t:
plt.clf()
axis = figure.add_subplot(111)
axis.autoscale(False)
plt.xlim(0, self.length_x)
plt.ylim(-1.5, 1.5)
x = []
for i in range((self.length_x)*100):
u = i/100
x.append(u)
y = []
for i in x:
output = math.exp(-1*(pow(self.quantum_number, 2)*6.626*pow(10, -34)*math.pi*time_interval)/(4*9.109*pow(10, -31)*self.length_x))*(2/self.length_x)*pow(math.sin((math.pi*self.quantum_number*i)/(self.length_x)), 2)
y.append(output)
axis.plot(x, y, color = "blue")
axis.set_xlabel(r'$x$')
axis.set_ylabel(r'$|\psi(x)|^2$')
figure.canvas.draw()
time.sleep(0.01)
else:
x = []
for i in range((self.length_x)*100):
u = i/100
x.append(u)
y = []
for i in x:
output = (2/self.length_x)*pow(math.sin((math.pi*self.quantum_number*i)/(self.length_x)), 2)
y.append(output)
plt.plot(x, y)
plt.xlabel(r'$x$')
plt.ylabel(r'$|\psi(x)|^2$')
plt.show()
class TwoDimensional():
def __init__(self, length_x, length_y, quantum_number_x, quantum_number_y):
self.length_x = length_x
self.length_y = length_y
self.quantum_number_x = quantum_number_x
self.quantum_number_y = quantum_number_y
def wavefunction(self):
"""Generating x-coordinates."""
x = np.linspace(0, self.length_x, 100)
"""Generating y-coordinates."""
y = np.linspace(0, self.length_y, 100)
"""Generating all possible xy-coordinates."""
X,Y = np.meshgrid(x,y)
"""Generating z-coordinates from xy-coordinates."""
Z = np.sqrt((2)/(self.length_x*self.length_y))*np.sin((math.pi*self.quantum_number_x*X)/(self.length_x))*np.sin((math.pi*self.quantum_number_y*Y)/(self.length_y))
"""Generating plot."""
figure = plt.figure()
axis = figure.add_subplot(111, projection='3d')
contour = axis.plot_surface(X,Y,Z,cmap='hot')
figure.colorbar(contour, shrink = 0.75)
axis.set_xlabel(r'$x$')
axis.set_ylabel(r'$y$')
axis.set_zlabel(r'$\psi(x, y)$')
axis.view_init(elev = 30, azim = -135)
plt.show()
def PDF(self):
"""Generating x-coordinates."""
x = np.linspace(0, self.length_x, 1000)
"""Generating y-coordinates."""
y = np.linspace(0, self.length_y, 1000)
"""Generating all possible xy-coordinates."""
X,Y = np.meshgrid(x,y)
"""Generating z-coordinates from xy-coordinates."""
Z = (2/(self.length_x*self.length_y))*pow(np.sin((math.pi*self.quantum_number_x*X)/(self.length_x))*np.sin((math.pi*self.quantum_number_y*Y)/(self.length_y)),2)
"""Generating plot."""
figure = plt.figure()
axis = figure.add_subplot(111, projection='3d')
contour = axis.plot_surface(X,Y,Z,cmap='hot')
figure.colorbar(contour, shrink = 0.75)
axis.set_xlabel(r'$x$')
axis.set_ylabel(r'$y$')
axis.set_zlabel(r'$|\psi(x, y)|^2$')
axis.view_init(elev = 30, azim = -135)
plt.show()
class ThreeDimensional():
def __init__(self, length_x, length_y, length_z, quantum_number_x, quantum_number_y, quantum_number_z, scatter_density):
self.length_x = length_x
self.length_y = length_y
self.length_z = length_z
self.quantum_number_x = quantum_number_x
self.quantum_number_y = quantum_number_y
self.quantum_number_z = quantum_number_z
self.scatter_density = scatter_density
def wavefunction(self):
"""Generating x-coordinates."""
x = []
for i in range((self.length_x)*self.scatter_density):
u = i/self.scatter_density
x.append(u)
"""Generating y-coordinates."""
y = []
for i in range((self.length_y)*self.scatter_density):
u = i/self.scatter_density
y.append(u)
"""Generating z-coordinates."""
z = []
for i in range((self.length_z)*self.scatter_density):
u = i/self.scatter_density
z.append(u)
"""Generating all possible xyz-coordinates."""
space_coordinates = []
counter = 0
for i in x:
for j in y:
for k in z:
coordinate = []
coordinate.append(i)
coordinate.append(j)
coordinate.append(k)
space_coordinates.append(coordinate)
counter = counter + 1
"""Generating colour-coordinates from xyz-coordinates."""
for coordinate in space_coordinates:
output = math.sqrt((2)/(self.length_x*self.length_y*self.length_z))*math.sin((math.pi*self.quantum_number_x*coordinate[0])/(self.length_x))*math.sin((math.pi*self.quantum_number_y*coordinate[1])/(self.length_y))*math.sin((math.pi*self.quantum_number_z*coordinate[2])/(self.length_z))
coordinate.append(output)
"""Formatting colour-xyz-coordinates."""
x_plot = []
y_plot = []
z_plot = []
colour_plot = []
for coordinate in space_coordinates:
x_plot.append(coordinate[0])
y_plot.append(coordinate[1])
z_plot.append(coordinate[2])
colour_plot.append(coordinate[3])
"""Generating plot"""
fig = plt.figure()
ax1 = fig.add_subplot(111, projection='3d')
img = ax1.scatter(x_plot, y_plot, z_plot, c=colour_plot, cmap=plt.get_cmap('jet'))
fig.colorbar(img)
ax1.set_xlabel(r'$x$')
ax1.set_ylabel(r'$y$')
ax1.set_zlabel(r'$z$')
plt.title(r'$\psi(x, y, z)$')
plt.show()
def PDF(self):
"""Generating x-coordinates."""
x = []
for i in range((self.length_x)*self.scatter_density):
u = i/self.scatter_density
x.append(u)
"""Generating y-coordinates."""
y = []
for i in range((self.length_y)*self.scatter_density):
u = i/self.scatter_density
y.append(u)
"""Generating z-coordinates."""
z = []
for i in range((self.length_z)*self.scatter_density):
u = i/self.scatter_density
z.append(u)
"""Generating all possible xyz-coordinates."""
space_coordinates = []
counter = 0
for i in x:
for j in y:
for k in z:
coordinate = []
coordinate.append(i)
coordinate.append(j)
coordinate.append(k)
space_coordinates.append(coordinate)
counter = counter + 1
"""Generating colour-coordinates from xyz-coordinates."""
for coordinate in space_coordinates:
output = (2/(self.length_x*self.length_y*self.length_z))*pow(math.sin((math.pi*self.quantum_number_x*coordinate[0])/(self.length_x))*math.sin((math.pi*self.quantum_number_y*coordinate[1])/(self.length_y))*math.sin((math.pi*self.quantum_number_z*coordinate[2])/(self.length_z)),2)
coordinate.append(output)
"""Formatting colour-xyz-coordinates."""
x_plot = []
y_plot = []
z_plot = []
colour_plot = []
for coordinate in space_coordinates:
x_plot.append(coordinate[0])
y_plot.append(coordinate[1])
z_plot.append(coordinate[2])
colour_plot.append(coordinate[3])
"""Generating plot"""
fig = plt.figure()
ax1 = fig.add_subplot(111, projection='3d')
img = ax1.scatter(x_plot, y_plot, z_plot, c=colour_plot, cmap=plt.get_cmap('jet'))
fig.colorbar(img)
ax1.set_xlabel(r'$x$')
ax1.set_ylabel(r'$y$')
ax1.set_zlabel(r'$z$')
plt.title(r'$|\psi(x, y, z)|^{2}$')
plt.show()
ParticleInABox().OneDimensional(length_x = 1, quantum_number = 2, time_dependence = True).PDF()
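# Usage sketches for the other geometries (same constructor patterns as above):
# ParticleInABox().TwoDimensional(length_x=1, length_y=1,
#                                 quantum_number_x=2, quantum_number_y=1).wavefunction()
# ParticleInABox().ThreeDimensional(length_x=1, length_y=1, length_z=1,
#                                   quantum_number_x=1, quantum_number_y=2,
#                                   quantum_number_z=1, scatter_density=10).PDF()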
| 38.910217 | 300 | 0.484803 | 1,449 | 12,568 | 4.040028 | 0.109041 | 0.099078 | 0.069525 | 0.033311 | 0.843355 | 0.831227 | 0.817902 | 0.798428 | 0.7863 | 0.76341 | 0 | 0.028784 | 0.389083 | 12,568 | 322 | 301 | 39.031056 | 0.733655 | 0.010821 | 0 | 0.778261 | 0 | 0 | 0.017466 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03913 | false | 0 | 0.034783 | 0 | 0.091304 | 0.004348 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
027048aee89a2df2b84670669dbd9e1bf935b56b | 6,144 | py | Python | mmdet3d/core/bbox/coders/delta_xyzwhlr_bbox_coder.py | mjseong0414/SPA_Radar_mmdet3d | ae4eee101a5665b72586d3d5db06832bf45b3b33 | [
"Apache-2.0"
] | null | null | null | mmdet3d/core/bbox/coders/delta_xyzwhlr_bbox_coder.py | mjseong0414/SPA_Radar_mmdet3d | ae4eee101a5665b72586d3d5db06832bf45b3b33 | [
"Apache-2.0"
] | null | null | null | mmdet3d/core/bbox/coders/delta_xyzwhlr_bbox_coder.py | mjseong0414/SPA_Radar_mmdet3d | ae4eee101a5665b72586d3d5db06832bf45b3b33 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) OpenMMLab. All rights reserved.
import torch
from mmdet.core.bbox import BaseBBoxCoder
from mmdet.core.bbox.builder import BBOX_CODERS
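# Both coders below register themselves with mmdet's BBOX_CODERS registry, so a
# config can presumably instantiate them by name, e.g.
# dict(type='DeltaXYZWLHRBBoxCoder', code_size=7).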
@BBOX_CODERS.register_module()
class DeltaXYZWLHRBBoxCoder(BaseBBoxCoder):
"""Bbox Coder for 3D boxes.
Args:
code_size (int): The dimension of boxes to be encoded.
"""
def __init__(self, code_size=7):
super(DeltaXYZWLHRBBoxCoder, self).__init__()
self.code_size = code_size
self.name = 'DeltaXYZWLHRBBoxCoder'
@staticmethod
def encode(src_boxes, dst_boxes):
"""Get box regression transformation deltas (dx, dy, dz, dw, dh, dl,
dr, dv*) that can be used to transform the `src_boxes` into the
`target_boxes`.
Args:
src_boxes (torch.Tensor): source boxes, e.g., object proposals.
dst_boxes (torch.Tensor): target of the transformation, e.g.,
ground-truth boxes.
Returns:
torch.Tensor: Box transformation deltas.
"""
box_ndim = src_boxes.shape[-1]
cas, cgs, cts = [], [], []
if box_ndim > 7:
xa, ya, za, wa, la, ha, ra, *cas = torch.split(
src_boxes, 1, dim=-1)
xg, yg, zg, wg, lg, hg, rg, *cgs = torch.split(
dst_boxes, 1, dim=-1)
cts = [g - a for g, a in zip(cgs, cas)]
else:
xa, ya, za, wa, la, ha, ra = torch.split(src_boxes, 1, dim=-1)
xg, yg, zg, wg, lg, hg, rg = torch.split(dst_boxes, 1, dim=-1)
za = za + ha / 2
zg = zg + hg / 2
diagonal = torch.sqrt(la**2 + wa**2)
xt = (xg - xa) / diagonal
yt = (yg - ya) / diagonal
zt = (zg - za) / ha
lt = torch.log(lg / la)
wt = torch.log(wg / wa)
ht = torch.log(hg / ha)
rt = rg - ra
return torch.cat([xt, yt, zt, wt, lt, ht, rt, *cts], dim=-1)
@staticmethod
def decode(anchors, deltas):
"""Apply transformation `deltas` (dx, dy, dz, dw, dh, dl, dr, dv*) to
`boxes`.
Args:
anchors (torch.Tensor): Parameters of anchors with shape (N, 7).
deltas (torch.Tensor): Encoded boxes with shape
(N, 7+n) [x, y, z, w, l, h, r, velo*].
Returns:
torch.Tensor: Decoded boxes.
"""
cas, cts = [], []
box_ndim = anchors.shape[-1]
if box_ndim > 7:
xa, ya, za, wa, la, ha, ra, *cas = torch.split(anchors, 1, dim=-1)
xt, yt, zt, wt, lt, ht, rt, *cts = torch.split(deltas, 1, dim=-1)
else:
xa, ya, za, wa, la, ha, ra = torch.split(anchors, 1, dim=-1)
xt, yt, zt, wt, lt, ht, rt = torch.split(deltas, 1, dim=-1)
za = za + ha / 2
diagonal = torch.sqrt(la**2 + wa**2)
xg = xt * diagonal + xa
yg = yt * diagonal + ya
zg = zt * ha + za
lg = torch.exp(lt) * la
wg = torch.exp(wt) * wa
hg = torch.exp(ht) * ha
rg = rt + ra
zg = zg - hg / 2
cgs = [t + a for t, a in zip(cts, cas)]
return torch.cat([xg, yg, zg, wg, lg, hg, rg, *cgs], dim=-1)
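# Minimal round-trip sketch (hypothetical tensors; encode and decode are exact
# inverses of each other for this coder):
#   anchors  = torch.tensor([[0., 0., -1., 1.6, 3.9, 1.56, 0.]])
#   gt_boxes = torch.tensor([[0.5, 0.2, -0.9, 1.7, 4.1, 1.6, 0.1]])
#   deltas = DeltaXYZWLHRBBoxCoder.encode(anchors, gt_boxes)
#   assert torch.allclose(DeltaXYZWLHRBBoxCoder.decode(anchors, deltas), gt_boxes)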
@BBOX_CODERS.register_module()
class DeltaXYWLRBBoxCoder(BaseBBoxCoder):
"""Bbox Coder for 3D boxes.
Args:
code_size (int): The dimension of boxes to be encoded.
"""
def __init__(self, code_size=7):
super(DeltaXYWLRBBoxCoder, self).__init__()
self.code_size = code_size
self.name = 'DeltaXYWLRBBoxCoder'
self.z_fixed = 2.0
self.h_fixed = -1.0
@staticmethod
def encode(src_boxes, dst_boxes):
"""Get box regression transformation deltas (dx, dy, dw, dh, dr, dv*) that can be used to transform the `src_boxes` into the
`target_boxes`.
Args:
src_boxes (torch.Tensor): source boxes, e.g., object proposals.
dst_boxes (torch.Tensor): target of the transformation, e.g.,
ground-truth boxes.
Returns:
torch.Tensor: Box transformation deltas.
"""
box_ndim = src_boxes.shape[-1]
cas, cgs, cts = [], [], []
if box_ndim > 7:
xa, ya, za, wa, la, ha, ra, *cas = torch.split(
src_boxes, 1, dim=-1)
xg, yg, zg, wg, lg, hg, rg, *cgs = torch.split(
dst_boxes, 1, dim=-1)
cts = [g - a for g, a in zip(cgs, cas)]
else:
xa, ya, za, wa, la, ha, ra = torch.split(src_boxes, 1, dim=-1)
xg, yg, zg, wg, lg, hg, rg = torch.split(dst_boxes, 1, dim=-1)
diagonal = torch.sqrt(la**2 + wa**2)
xt = (xg - xa) / diagonal
yt = (yg - ya) / diagonal
lt = torch.log(lg / la)
wt = torch.log(wg / wa)
rt = rg - ra
return torch.cat([xt, yt, wt, lt, rt, *cts], dim=-1)
@staticmethod
def decode(anchors, deltas):
"""Apply transformation `deltas` (dx, dy, dz, dw, dh, dl, dr, dv*) to
`boxes`.
Args:
anchors (torch.Tensor): Parameters of anchors with shape (N, 7).
deltas (torch.Tensor): Encoded boxes with shape
(N, 5+n) [x, y, w, l, r, velo*].
Returns:
torch.Tensor: Decoded boxes.
"""
cas, cts = [], []
box_ndim = anchors.shape[-1]
if box_ndim > 7:
xa, ya, za, wa, la, ha, ra, *cas = torch.split(anchors, 1, dim=-1)
xt, yt, wt, lt, rt, *cts = torch.split(deltas, 1, dim=-1)
else:
xa, ya, za, wa, la, ha, ra = torch.split(anchors, 1, dim=-1)
xt, yt, wt, lt, rt = torch.split(deltas, 1, dim=-1)
diagonal = torch.sqrt(la**2 + wa**2)
xg = xt * diagonal + xa
yg = yt * diagonal + ya
lg = torch.exp(lt) * la
wg = torch.exp(wt) * wa
rg = rt + ra
ret = torch.cat([xg, yg, wg, lg, rg], dim=-1)
z_fixed = -1.0*torch.ones_like(xg)
h_fixed = 2.0*torch.ones_like(xg)
cgs = [t + a for t, a in zip(cts, cas)]
return torch.cat([ret[...,:2], z_fixed, ret[..., 2:4], h_fixed, ret[..., 4:], *cgs], dim=-1)
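# Note: decode() is a @staticmethod, so the z_fixed/h_fixed attributes set in
# __init__ are never consulted; the decoded z and h come from the hard-coded
# ones_like tensors above (z = -1.0, h = 2.0).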
| 34.516854 | 132 | 0.515299 | 880 | 6,144 | 3.521591 | 0.153409 | 0.027106 | 0.025815 | 0.020652 | 0.866086 | 0.837044 | 0.835753 | 0.819619 | 0.795418 | 0.772185 | 0 | 0.018975 | 0.339518 | 6,144 | 177 | 133 | 34.711864 | 0.744702 | 0.251953 | 0 | 0.730769 | 0 | 0 | 0.009365 | 0.004917 | 0 | 0 | 0 | 0 | 0 | 1 | 0.057692 | false | 0 | 0.028846 | 0 | 0.144231 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0282109c967f00cfd8d334089dd07e510702d21a | 17,803 | py | Python | tao1/core/set.py | MortalViews/tao1 | 2b913c5d0b4d99c626ea719218b3607a3f7273cc | [
"MIT"
] | 25 | 2015-08-06T20:42:20.000Z | 2021-06-04T09:24:37.000Z | tao1/core/set.py | MortalViews/tao1 | 2b913c5d0b4d99c626ea719218b3607a3f7273cc | [
"MIT"
] | null | null | null | tao1/core/set.py | MortalViews/tao1 | 2b913c5d0b4d99c626ea719218b3607a3f7273cc | [
"MIT"
] | 4 | 2015-09-27T21:24:30.000Z | 2021-01-09T15:42:52.000Z |
def get_doc_forum():
return {"_id":"des:forum", "hierarchy": ["tree"],"type": "templ", "actions": [{"title": {"ru": "","en": ""},"hint": {"ru": "Создать новую запись","en": "Create new row"},"id": "new","visible": True,"action": "this.createRow","type": "button","class": {"inrow": False,"toolbar": True,"context": True},"icon": "ui-icon-document"},{"title": {"ru": "","en": ""},"hint": {"ru": "Удалить выделенные записи","en": "Delete"},"id": "del","visible": True,"action": "this.deleteRow","type": "button","class": {"inrow": True,"toolbar": True,"context": True},"icon": "ui-icon-trash"},{"title": {"ru": "","en": ""},"hint": {"ru": "Обновить","en": "Refresh"},"id": "refresh","visible": True,"action": "this.updatelist","type": "button","class": {"inrow": False,"toolbar": True,"context": True},"icon": "ui-icon-refresh"},{"title": {"ru": "","en": ""},"hint": {"ru": "Дублировать","en": "Duplicate"},"id": "duplicate","visible": True,"action": "this.duplicate","type": "button","class": {"inrow": True,"toolbar": True,"context": True},"icon": "ui-icon-copy"},{"title": {"ru": "","en": ""},"hint": {"ru": "Редактировать","en": "Edit"},"id": "edit","visible": True,"action": "this.edit","type": "button","class": {"inrow": True,"toolbar": True,"context": True},"icon": "ui-icon-wrench"},{"title": {"ru": "","en": ""},"hint": {"ru": "Добавить поле","en": "Add field"},"id": "add_field","visible": True,"action": "this.add_field","type": "button","class": {"inrow": False,"toolbar": True,"context": False},"icon": "ui-icon-arrowreturnthick-1-s"},{"title": {"ru": "","en": ""},"hint": {"ru": "Удалить поле","en": "Delete field"},"id": "del_field","visible": True,"action": "this.del_field","type": "button","class": {"inrow": False,"toolbar": True,"context": False},"icon": "ui-icon-arrowreturnthick-1-n"},{"title": {"ru": "","en": ""},"hint": {"ru": "Перенести документ","en": "Move document"},"id": "transfer","visible": True,"action": "this.transfer","type": "button","class": {"inrow": False,"toolbar": True,"context": False},"icon": "ui-icon-transferthick-e-w"},{"hint": {"ru": "Отредактировать поле","en": "Edit field"},"title": {"ru": "","en": ""},"id": "edit_field","visible": True,"action": "this.edit_field","type": "button","class": {"inrow": False,"context": True,"toolbar": True},"icon": "ui-icon-gear"}],
"conf": {"turn":"true", "is_doc": False,"doc_type": "des:forum","is_article": "true","comments": "on","title": {"ru": "Форум","en": "Forum"}},
"attached_hierarchies": None, "field_map": [{"title": {"ru": "название"},"hint": {"ru": "название"},"is_translate": True,"is_editable": True,"visible": True,"oncreate": "edit","type": "string","id": "title"},{"title": {"ru": "Описание","en": "undefined"},"hint": {"ru": "Описание","en": "undefined"},"is_translate": "true","is_editable": "true","visible": "true","relation": "com:des:library","oncreate": "edit","type": "string","id": "descr"},{"title": {"ru": "Содержание"},"hint": {"ru": "Содержание"},"is_translate": True,"is_editable": True,"visible": False,"oncreate": "edit","type": "rich_edit","id": "body"},{"title": {"ru": "Дата"},"hint": {"ru": "Дата"},"is_translate": False,"is_editable": True,"visible": True,"oncreate": "edit","type": "date","id": "date"},{"hint": {"ru": "","en": ""},"title": {"ru": "Опубликовать","en": "Published"},"is_editable": "true","visible": "true","relation": "com:des:2","oncreate": "edit","type": "checkbox","id": "published"},{"title": {"ru": "Автор","en": "Author"},"hint": {"ru": "","en": ""},"is_editable": "true","visible": "true","relation": "des:users","oncreate": "edit","type": "select","id": "user"},{"hint": {"ru": "Показ на главной странице","en": ""},"title": {"ru": "Главная","en": "Home"},"is_editable": "true","visible": "true","relation": "com:des:library","oncreate": "edit","type": "checkbox","id": "home"},{"hint": {"ru": "","en": ""},"title": {"ru": "Простой стиль","en": "Simple style"},"is_editable": "true","visible": "true","relation": "com:des:library","oncreate": "edit","type": "checkbox","id": "simple_style"}]}
def get_doc_obj():
return {"_id":"des:obj", "hierarchy": ["tree"],"type": "templ", "actions": [{"title": {"ru": "","en": ""},"hint": {"ru": "Создать новую запись","en": "Create new row"},"id": "new","visible": True,"action": "this.createRow","type": "button","class": {"inrow": False,"toolbar": True,"context": True},"icon": "ui-icon-document"},{"title": {"ru": "","en": ""},"hint": {"ru": "Удалить выделенные записи","en": "Delete"},"id": "del","visible": True,"action": "this.deleteRow","type": "button","class": {"inrow": True,"toolbar": True,"context": True},"icon": "ui-icon-trash"},{"title": {"ru": "","en": ""},"hint": {"ru": "Обновить","en": "Refresh"},"id": "refresh","visible": True,"action": "this.updatelist","type": "button","class": {"inrow": False,"toolbar": True,"context": True},"icon": "ui-icon-refresh"},{"title": {"ru": "","en": ""},"hint": {"ru": "Дублировать","en": "Duplicate"},"id": "duplicate","visible": True,"action": "this.duplicate","type": "button","class": {"inrow": True,"toolbar": True,"context": True},"icon": "ui-icon-copy"},{"title": {"ru": "","en": ""},"hint": {"ru": "Редактировать","en": "Edit"},"id": "edit","visible": True,"action": "this.edit","type": "button","class": {"inrow": True,"toolbar": True,"context": True},"icon": "ui-icon-wrench"},{"title": {"ru": "","en": ""},"hint": {"ru": "Добавить поле","en": "Add field"},"id": "add_field","visible": True,"action": "this.add_field","type": "button","class": {"inrow": False,"toolbar": True,"context": False},"icon": "ui-icon-arrowreturnthick-1-s"},{"title": {"ru": "","en": ""},"hint": {"ru": "Удалить поле","en": "Delete field"},"id": "del_field","visible": True,"action": "this.del_field","type": "button","class": {"inrow": False,"toolbar": True,"context": False},"icon": "ui-icon-arrowreturnthick-1-n"},{"title": {"ru": "","en": ""},"hint": {"ru": "Перенести документ","en": "Move document"},"id": "transfer","visible": True,"action": "this.transfer","type": "button","class": {"inrow": False,"toolbar": True,"context": False},"icon": "ui-icon-transferthick-e-w"},{"hint": {"ru": "Отредактировать поле","en": "Edit field"},"title": {"ru": "","en": ""},"id": "edit_field","visible": True,"action": "this.edit_field","type": "button","class": {"inrow": False,"context": True,"toolbar": True},"icon": "ui-icon-gear"}],
"conf": {"turn":"true", "is_doc": False,"doc_type": "des:obj","is_article": "true","comments": "on","title": {"ru": "Материалы","en": "Materials"}},
"attached_hierarchies": None, "field_map": [{"title": {"ru": "название"},"hint": {"ru": "название"},"is_translate": True,"is_editable": True,"visible": True,"oncreate": "edit","type": "string","id": "title"},{"title": {"ru": "Описание","en": "undefined"},"hint": {"ru": "Описание","en": "undefined"},"is_translate": "true","is_editable": "true","visible": "true","relation": "com:des:library","oncreate": "edit","type": "string","id": "descr"},{"title": {"ru": "Содержание"},"hint": {"ru": "Содержание"},"is_translate": True,"is_editable": True,"visible": False,"oncreate": "edit","type": "rich_edit","id": "body"},{"title": {"ru": "Дата"},"hint": {"ru": "Дата"},"is_translate": False,"is_editable": True,"visible": True,"oncreate": "edit","type": "date","id": "date"},{"hint": {"ru": "","en": ""},"title": {"ru": "Опубликовать","en": "Published"},"is_editable": "true","visible": "true","relation": "com:des:2","oncreate": "edit","type": "checkbox","id": "published"},{"title": {"ru": "Автор","en": "Author"},"hint": {"ru": "","en": ""},"is_editable": "true","visible": "true","relation": "des:users","oncreate": "edit","type": "select","id": "user"},{"hint": {"ru": "Показ на главной странице","en": ""},"title": {"ru": "Главная","en": "Home"},"is_editable": "true","visible": "true","relation": "com:des:library","oncreate": "edit","type": "checkbox","id": "home"},{"hint": {"ru": "","en": ""},"title": {"ru": "Простой стиль","en": "Simple style"},"is_editable": "true","visible": "true","relation": "com:des:library","oncreate": "edit","type": "checkbox","id": "simple_style"}]}
def get_doc_ware():
return {"id":"des:ware", "hierarchy": [ "tree:ware" ], "type": "templ", "actions": [ { "hint": { "ru": "Создать новую запись", "en": "Create new row" }, "title": { "ru": "", "en": "" }, "class": { "inrow": False, "context": True, "toolbar": True }, "visible": True, "action": "this.createRow", "type": "button", "id": "new", "icon": "ui-icon-document" }, { "hint": { "ru": "Удалить выделенные записи", "en": "Delete" }, "title": { "ru": "", "en": "" }, "class": { "inrow": True, "context": True, "toolbar": true }, "visible": true, "action": "this.deleteRow", "type": "button", "id": "del", "icon": "ui-icon-trash" }, { "hint": { "ru": "Обновить", "en": "Refresh" }, "title": { "ru": "", "en": "" }, "class": { "inrow": False, "context": True, "toolbar": True }, "visible": True, "action": "this.updatelist", "type": "button", "id": "refresh", "icon": "ui-icon-refresh" }, { "hint": { "ru": "Дублировать", "en": "Duplicate" }, "title": { "ru": "", "en": "" }, "class": { "inrow": True, "context": True, "toolbar": True }, "visible": True, "action": "this.duplicate", "type": "button", "id": "duplicate", "icon": "ui-icon-copy" }, { "hint": { "ru": "Редактировать", "en": "Edit" }, "title": { "ru": "", "en": "" }, "class": { "inrow": True, "context": True, "toolbar": True }, "visible": True, "action": "this.edit", "type": "button", "id": "edit", "icon": "ui-icon-wrench" }, { "hint": { "ru": "Добавить поле", "en": "Add field" }, "title": { "ru": "", "en": "" }, "class": { "inrow": False, "context": False, "toolbar": True }, "visible": True, "action": "this.add_field", "type": "button", "id": "add_field", "icon": "ui-icon-arrowreturnthick-1-s" }, { "hint": { "ru": "Удалить поле", "en": "Delete field" }, "title": { "ru": "", "en": "" }, "class": { "inrow": False, "context": False, "toolbar": True }, "visible": True, "action": "this.del_field", "type": "button", "id": "del_field", "icon": "ui-icon-arrowreturnthick-1-n" }, { "hint": { "ru": "Перенести документ", "en": "Move document" }, "title": { "ru": "", "en": "" }, "class": { "inrow": false, "context": false, "toolbar": true }, "visible": true, "action": "this.transfer", "type": "button", "id": "transfer", "icon": "ui-icon-transferthick-e-w" }, { "visible": true, "icon": "ui-icon-custom custom-print_excel", "hint": "Распечатать excel", "action": "this.print_excel", "title": "", "type": "button", "id": "print_excel", "class": { "inrow": false, "toolbar": true, "context": false } } ],
"conf": {"turn":"true", "is_doc": 'false', "title": { "ru": "Товары", "en": "Ware" }, "type": "templ", "comments": "on", "doc_type": "des:ware" },
"attached_hierarchies": None, "field_map": [ { "hint": { "ru": "Отображение на главной странице сайта", "en": "" }, "is_translate": False, "is_editable": True, "visible": False, "title": { "ru": "Главная", "en": "" }, "oncreate": "edit", "type": "checkbox", "id": "home" }, { "title": { "ru": "Название", "en": "" }, "hint": { "ru": "Название товара", "en": "" }, "is_translate": True, "is_editable": "true", "visible": "true", "oncreate": "edit", "type": "string", "id": "title" }, { "hint": { "ru": "Описание товара", "en": "" }, "is_translate": True, "is_editable": "true", "visible": "true", "title": { "ru": "Описание", "en": "" }, "oncreate": "edit", "type": "string", "id": "descr" }, { "hint": { "ru": "Страна производитель", "en": "" }, "is_translate": True, "is_editable": "true", "visible": "true", "relation": "Country", "title": { "ru": "Страна", "en": "" }, "oncreate": "edit", "type": "select", "id": "country" }, { "title": { "ru": "Цена", "en": "" }, "hint": { "ru": "Цена товара", "en": "" }, "is_translate": False, "is_editable": "true", "visible": "true", "oncreate": "edit", "type": "string", "id": "price" }]}
def get_doc_des(id, name_ru, name_en, owner, is_doc=False):
doc = {
"_id": "des:"+id,
"hierarchy": [ "tree:"+id ], "attached_hierarchies": None,
"events": {},
"type": "templ",
"conf":{"comments": "on", "title": {"ru":name_ru, "en":name_en}, "doc_type":"des:"+ id, "owner":owner, "turn":"true"},
"doc": [
{"title": {"ru": u"Опубликовано","en": "Published"},"hint": {"ru": "","en": ""},"is_translate": "false","is_editable": "true","visible": "true","relation": "des:web_order","oncreate": "edit","type": "checkbox","id": "pub"},
{"title": {"ru": u"Автор","en": "Author"},"hint": {"ru": "","en": ""},"is_editable": "true","visible": "true","relation": "des:users","oncreate": "edit","type": "select","id": "user"},
{"title": {"ru": u"Тэги","en": "Tags"},"hint": {"ru": "","en": ""},"is_translate": "true","is_editable": "true","visible": "true","relation": "com:des:obj","oncreate": "edit","type": "string","id": "tags"},
{"title": {"ru": u"Название"},"hint": {"ru": u"Название"},"is_translate": True,"is_editable": True,"visible": True,"oncreate": "edit","type": "string","id": "title"},
{"title": {"ru": u"Содержание"},"hint": {"ru": u"Содержание"},"is_translate": True,"is_editable": True,"visible": False,"oncreate": "edit","type": "rich_edit","id": "body"},
{"title": {"ru": u"Дата"},"hint": {"ru": u"Дата"},"is_translate": False,"is_editable": True,"visible": True,"oncreate": "edit","type": "date","id": "date"},
{"title": {"ru": u"Анонс","en": "Description"},"hint": {"ru": "","en": ""},"is_translate": "true","is_editable": "true","visible": "true","relation": "des:web_order","oncreate": "edit","type": "string","id": "descr"},
{"hint": {"ru": "","en": ""},"is_translate": "false","is_editable": "false","visible": "true","relation": "com:des:obj","title": {"ru": "Второй id","en": "Second id"},"oncreate": "edit","type": "string","id": "rev"}
]
}
if is_doc == 'doc':
doc['conf'].update({"is_doc":True, "turn":"true"})
# the base template above does not define an "actions" list, so create it on first use
doc.setdefault('actions', []).append(
{"hint": u"Провести документ", "title": "", "class": { "inrow": False, "context": False, "toolbar": True},
"visible": True, "action": "this.checkout", "type": "button", "id": "check", "icon": "icon-check"}
)
elif is_doc != 'doc' and owner == '_':
doc['conf'].update({"is_doc":False, "turn":"true"})
if is_doc == 'doc' and owner == '_':
extra_fields = [
{"hint": {"ru": "", "en": ""}, "title": {"ru": "Номер", "en": "Number" }, "is_editable":"true", "visible":"true", "oncreate":"edit", "type":"string", "id":"number" },
{"hint": {"ru": "", "en": ""}, "title": {"ru": "Статус", "en": "Status" }, "is_editable":"true", "visible":"true", "oncreate":"edit", "type":"string", "id":"status" },
{"hint": {"ru": "", "en": ""}, "title": {"ru":"Контерагент", "en":"Counteragent"}, "is_editable":"true", "visible":"true", "relation":"des:counteragent", "oncreate": "edit", "type":"select", "id":"counteragent"},
{"hint": {"ru": "", "en": ""}, "title": { "ru": "Предприятие", "en": "Enterprise"}, "is_editable": "true", "visible": "true", "relation": "des:enterprise", "oncreate": "edit", "type": "select", "id":"enterprise" },
{"hint": {"ru": "", "en": ""}, "title": {"ru": "Сумма", "en": "Amount"}, "is_editable": "true", "visible": "true", "oncreate": "hide", "type": "string", "id": "amount" },
{"hint": {"ru": "", "en": ""}, "title": { "ru": "Сумма с НДС", "en": "Amount VAT"}, "is_editable": "true", "visible": "true", "relation": "des: ", "oncreate": "hide", "type": "string", "id": "amount_vat" }
]
for res in extra_fields:
doc['field_map'].append(res)
elif is_doc == 'doc' and owner != '_':
extra_fields = [
{"hint": {"ru": "Название товара", "en": "Title ware"}, "title": {"ru": "Название", "en": "title"}, "is_editable": "true", "visible": "true", "relation": "des: ware", "oncreate": "edit", "type": "select", "id": "title"},
{"hint": {"ru": "Количество","en": "Quantity"}, "title": {"ru": "Количество", "en": "Quantity"}, "is_editable": "true", "visible": "true", "relation": "des: 1", "oncreate": "edit", "type": "string", "id": "quantity"},
{"hint": {"ru": "", "en":""}, "title": {"ru": "Цена", "en": "Price"}, "is_editable": "true", "visible": "true", "relation": "des: 1", "oncreate": "edit", "type": "string","id": "price" },
{"hint": {"ru": "", "en":""}, "title": {"ru": "Цена с НДС","en": "Price VAT"}, "is_editable": "true","relation": "des: ","oncreate": "edit","type": "string","id": "price_vat"},
{"hint": {"ru": "", "en":""}, "title": {"ru": "Сумма", "en": "Amount" }, "is_editable": "false","visible": "true","relation": "des: 1","oncreate": "hide","type": "string","id": "amount"},
{"hint": {"ru": "", "en":""}, "title": {"ru": "Сумма с НДС", "en": "Amount VAT" }, "is_editable": "true", "visible": "true","relation": "des: ","oncreate": "hide","type": "string","id": "amount_vat"}
]
for res in extra_fields:
doc['field_map'].append(res)
return doc
def get_field_date():
doc = {"hint": {"ru": "", "en": ""}, "title": {"ru": "Дата", "en": "Date" }, "visible": False, "oncreate": "edit", "type": "date", "is_editable": False, "id": "date" },
return doc
def get_field_user():
doc = {"hint": {"ru": "", "en": ""}, "title": {"ru": "Пользователь", "en": "User" }, "visible": False, "oncreate": "none", "type":"select", "relation": "des:users", "is_editable": False, "id": "user" }
return doc
def get_field_title():
doc = {"hint": {"ru": "Название", "en": "Title"}, "title": {"ru": "Название", "en": "Title"}, "is_editable": "true", "visible": "true", "oncreate": "edit", "id": "title"},
return doc
def get_doc_permissions(id):
return {"des:"+id: {"edit":"true", "delete":"true", "create":"true", "move":"true", "view":"true" }}
def get_doc_role(data, domain):
return {
"_id": "role:"+data['id'],
"title":data['title'],
"type": "group",
# "users": { "user:"+get_domain(): "true" },
"users": { "user:"+domain: "true" },
"permissions": { }
}
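# Usage sketch (hypothetical ids and titles):
#   templ = get_doc_des('order', 'Заказ', 'Order', owner='_', is_doc='doc')
#   perms = get_doc_permissions('order')  # -> {"des:order": {"edit": "true", ...}}
#   role = get_doc_role({'id': 'manager', 'title': 'Manager'}, 'example.com')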
| 166.383178 | 2,456 | 0.565579 | 2,163 | 17,803 | 4.586685 | 0.084142 | 0.052212 | 0.066526 | 0.082552 | 0.828042 | 0.780264 | 0.744078 | 0.701341 | 0.679871 | 0.674025 | 0 | 0.000688 | 0.101837 | 17,803 | 106 | 2,457 | 167.95283 | 0.619762 | 0.002359 | 0 | 0.175 | 0 | 0 | 0.514788 | 0.013689 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1125 | false | 0 | 0 | 0.0625 | 0.225 | 0.0125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5a09f62a13d32035158484ba5a76a5313a67f9bf | 3,651 | py | Python | approxposterior/tests/test_findNewPoint.py | RoryBarnes/approxposterior | f80b068919732573471fbb488fcd72d35926c112 | [
"MIT"
] | 36 | 2018-04-05T23:37:55.000Z | 2021-07-22T09:28:33.000Z | approxposterior/tests/test_findNewPoint.py | RoryBarnes/approxposterior | f80b068919732573471fbb488fcd72d35926c112 | [
"MIT"
] | 38 | 2018-04-27T20:58:19.000Z | 2021-04-18T03:27:03.000Z | approxposterior/tests/test_findNewPoint.py | RoryBarnes/approxposterior | f80b068919732573471fbb488fcd72d35926c112 | [
"MIT"
] | 11 | 2018-04-25T16:48:52.000Z | 2021-11-24T02:30:10.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test finding a new design point, thetaT
@author: David P. Fleming [University of Washington, Seattle], 2018
@email: dflemin3 (at) uw (dot) edu
"""
from approxposterior import approx, likelihood as lh, gpUtils
import numpy as np
import george
def testFindAmp():
"""
Test the findNextPoint function.
"""
# Define algorithm parameters
m0 = 50 # Initial size of training set
bounds = ((-5,5), (-5,5)) # Prior bounds
algorithm = "bape"
# For reproducibility
seed = 57
np.random.seed(seed)
# Randomly sample initial conditions from the prior
# Note: adding corner cases because approxposterior loves corners
theta = np.array(list(lh.rosenbrockSample(m0)) + [[-5, 5], [5, 5]])
# Evaluate forward model log likelihood + lnprior for each theta
y = np.zeros(len(theta))
for ii in range(len(theta)):
y[ii] = lh.rosenbrockLnlike(theta[ii]) + lh.rosenbrockLnprior(theta[ii])
# Set up a gp
gp = gpUtils.defaultGP(theta, y, fitAmp=True)
# Initialize object using the Wang & Li (2017) Rosenbrock function example
# using default ExpSquaredKernel GP
ap = approx.ApproxPosterior(theta=theta,
y=y,
gp=gp,
lnprior=lh.rosenbrockLnprior,
lnlike=lh.rosenbrockLnlike,
priorSample=lh.rosenbrockSample,
bounds=bounds,
algorithm=algorithm)
# Find new point!
thetaT = ap.findNextPoint(computeLnLike=False,
bounds=bounds,
seed=seed)
err_msg = "findNextPoint selected incorrect thetaT."
assert(np.allclose(thetaT, [-2.03449242, -3.07172107], rtol=1.0e-3)), err_msg
# end function
def testFindNoAmp():
"""
Test the findNextPoint function.
"""
# Define algorithm parameters
m0 = 50 # Initial size of training set
bounds = ((-5,5), (-5,5)) # Prior bounds
algorithm = "bape"
# For reproducibility
seed = 57
np.random.seed(seed)
# Randomly sample initial conditions from the prior
# Note: adding corner cases because approxposterior loves corners
theta = np.array(list(lh.rosenbrockSample(m0)) + [[-5, 5], [5, 5]])
# Evaluate forward model log likelihood + lnprior for each theta
y = np.zeros(len(theta))
for ii in range(len(theta)):
y[ii] = lh.rosenbrockLnlike(theta[ii]) + lh.rosenbrockLnprior(theta[ii])
# Set up a gp
gp = gpUtils.defaultGP(theta, y, fitAmp=False)
# Initialize object using the Wang & Li (2017) Rosenbrock function example
# using default ExpSquaredKernel GP
ap = approx.ApproxPosterior(theta=theta,
y=y,
gp=gp,
lnprior=lh.rosenbrockLnprior,
lnlike=lh.rosenbrockLnlike,
priorSample=lh.rosenbrockSample,
bounds=bounds,
algorithm=algorithm)
# Find new point!
thetaT = ap.findNextPoint(computeLnLike=False,
bounds=bounds,
seed=seed)
err_msg = "findNextPoint selected incorrect thetaT."
assert(np.allclose(thetaT, [0.79813416, 0.85542199], rtol=1.0e-3)), err_msg
# end function
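# Note: the two tests above are identical except for the GP's fitAmp flag
# (gpUtils.defaultGP(..., fitAmp=True/False)), which is why the same seed and
# training set lead to different selected thetaT points.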
if __name__ == "__main__":
testFindAmp()
testFindNoAmp()
| 32.309735 | 81 | 0.567516 | 388 | 3,651 | 5.309278 | 0.342784 | 0.01165 | 0.01165 | 0.007767 | 0.84466 | 0.84466 | 0.84466 | 0.84466 | 0.820388 | 0.820388 | 0 | 0.034668 | 0.336346 | 3,651 | 112 | 82 | 32.598214 | 0.815518 | 0.296357 | 0 | 0.777778 | 0 | 0 | 0.038339 | 0 | 0 | 0 | 0 | 0 | 0.037037 | 1 | 0.037037 | false | 0 | 0.055556 | 0 | 0.092593 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5a37ed9d3bf14288b49f37f9406312da67fb94cb | 30,848 | py | Python | pvfactors/tests/test_irradiance/test_models.py | tcapelle/pvfactors | 1aaf6cdd3066a3a68d93db4ad7abcf10e97b5620 | [
"BSD-3-Clause"
] | null | null | null | pvfactors/tests/test_irradiance/test_models.py | tcapelle/pvfactors | 1aaf6cdd3066a3a68d93db4ad7abcf10e97b5620 | [
"BSD-3-Clause"
] | null | null | null | pvfactors/tests/test_irradiance/test_models.py | tcapelle/pvfactors | 1aaf6cdd3066a3a68d93db4ad7abcf10e97b5620 | [
"BSD-3-Clause"
] | null | null | null | import pytest
from pvfactors.irradiance import IsotropicOrdered, HybridPerezOrdered
from pvfactors.geometry import OrderedPVArray, PVSurface, PVRow
from pvlib.tools import cosd
import numpy as np
import pandas as pd
import datetime as dt
@pytest.fixture(scope='function')
def params_irr():
pvarray_parameters = {
'n_pvrows': 3,
'pvrow_height': 2.5,
'pvrow_width': 2.,
'surface_azimuth': 90., # east oriented modules
'axis_azimuth': 0., # axis of rotation towards North
'surface_tilt': 20.,
'gcr': 0.6,
'solar_zenith': 65.,
'solar_azimuth': 90., # sun located in the east
'rho_ground': 0.2,
'rho_front_pvrow': 0.01,
'rho_back_pvrow': 0.03
}
yield pvarray_parameters
def test_isotropic_model_front(params_irr):
"""Direct shading on front surface"""
# Create and fit irradiance model
DNI = 1000.
DHI = 100.
irr_model = IsotropicOrdered()
irr_model.fit(None, DNI, DHI,
params_irr['solar_zenith'],
params_irr['solar_azimuth'],
params_irr['surface_tilt'],
params_irr['surface_azimuth'],
params_irr['rho_ground'])
# Expected values
expected_dni_pvrow = DNI * cosd(45)
expected_dni_ground = DNI * cosd(65)
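# With the sun at 65 deg zenith / 90 deg azimuth and east-facing modules tilted
# 20 deg, the angle of incidence on the front side is 65 - 20 = 45 deg, hence
# DNI * cos(45); the horizontal ground sees DNI * cos(65).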
# Check irradiance fitting
np.testing.assert_almost_equal(irr_model.direct['ground_illum'][0],
expected_dni_ground)
np.testing.assert_almost_equal(irr_model.direct['front_illum_pvrow'][0],
expected_dni_pvrow)
assert irr_model.direct['back_illum_pvrow'][0] == 0.
# Create, fit, and transform pv array
pvarray = OrderedPVArray.fit_from_dict_of_scalars(
params_irr, param_names=IsotropicOrdered.params)
irr_model.transform(pvarray)
pvarray.transform(idx=0)
# there should be some direct shading
assert pvarray.pvrows[0].front.shaded_length
# Get modeling vectors
irradiance_vec, rho_vec, invrho_vec, total_perez_vec = \
irr_model.get_full_modeling_vectors(pvarray, 0)
# Check transform
expected_irradiance_vec = [
422.61826174069944, 422.61826174069944, 422.61826174069944,
422.61826174069944, 422.61826174069944, 0.0,
707.10678118654744, 0.0, 0.0, 707.10678118654744, 0.0, 0.0,
707.10678118654744, 0.0, 100.]
# pvrow
np.testing.assert_almost_equal(
pvarray.pvrows[2].front.get_param_weighted('direct'),
expected_dni_pvrow)
np.testing.assert_almost_equal(
pvarray.pvrows[1].front.list_segments[0]
.illum_collection.get_param_weighted('direct'), expected_dni_pvrow)
np.testing.assert_almost_equal(
pvarray.pvrows[1].front.list_segments[0]
.shaded_collection.get_param_weighted('direct'), 0.)
np.testing.assert_almost_equal(
pvarray.pvrows[0].back.get_param_weighted('direct'), 0.)
# ground
np.testing.assert_almost_equal(
pvarray.ground.list_segments[0]
.illum_collection.get_param_weighted('direct'), expected_dni_ground)
np.testing.assert_almost_equal(
pvarray.ground.list_segments[0]
.shaded_collection.get_param_weighted('direct'), 0.)
np.testing.assert_array_almost_equal(expected_irradiance_vec,
irradiance_vec)
# Check invrho_vec
expected_invrho_vec = np.array([
5., 5., 5., 5., 5.,
5., 100., 100., 33.333333, 100.,
100., 33.333333, 100., 33.333333, 1.])
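# (inverse reflectivities: 1/0.2 = 5 for ground surfaces, 1/0.01 = 100 for
# front sides, 1/0.03 = 33.33 for back sides)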
np.testing.assert_array_almost_equal(invrho_vec, expected_invrho_vec)
np.testing.assert_almost_equal(
pvarray.pvrows[0].front.get_param_weighted('rho'),
params_irr['rho_front_pvrow'])
np.testing.assert_almost_equal(
pvarray.pvrows[0].back.get_param_weighted('rho'),
params_irr['rho_back_pvrow'])
np.testing.assert_almost_equal(
pvarray.ground.get_param_weighted('rho'),
params_irr['rho_ground'])
# Check total perez vec
expected_total_perez_vec = [
522.61826174, 522.61826174, 522.61826174, 522.61826174, 522.61826174,
100., 807.243186, 100.13640481, 0., 807.243186,
100.13640481, 0., 807.243186, 0., 100.]
np.testing.assert_array_almost_equal(total_perez_vec,
expected_total_perez_vec)
def test_isotropic_model_back(params_irr):
"""Direct shading on back surface"""
params_irr.update({'surface_azimuth': 270,
'surface_tilt': 160})
# Apply irradiance model
DNI = 1000.
DHI = 100.
irr_model = IsotropicOrdered()
irr_model.fit(None, DNI, DHI,
params_irr['solar_zenith'],
params_irr['solar_azimuth'],
params_irr['surface_tilt'],
params_irr['surface_azimuth'],
params_irr['rho_ground'])
# Expected values
expected_dni_pvrow = DNI * cosd(45)
expected_dni_ground = DNI * cosd(65)
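# At 160 deg tilt / 270 deg azimuth the back side effectively faces east at a
# 20 deg tilt, so the back-side angle of incidence is again 65 - 20 = 45 deg.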
# Check fitting
np.testing.assert_almost_equal(irr_model.direct['ground_illum'][0],
expected_dni_ground)
np.testing.assert_almost_equal(irr_model.direct['back_illum_pvrow'][0],
expected_dni_pvrow)
assert irr_model.direct['front_illum_pvrow'][0] == 0.
# Create, fit, and transform pv array
pvarray = OrderedPVArray.fit_from_dict_of_scalars(
params_irr, param_names=IsotropicOrdered.params)
irr_model.transform(pvarray)
pvarray.transform(idx=0)
# there should be some direct shading
assert pvarray.pvrows[0].back.shaded_length
# Get modeling vectors
irradiance_vec, rho_vec, invrho_vec, total_perez_vec = \
irr_model.get_full_modeling_vectors(pvarray, 0)
# Check
expected_irradiance_vec = [
422.61826174069944, 422.61826174069944, 422.61826174069944,
422.61826174069944, 422.61826174069944, 0.0, 0.0, 707.10678118654755,
0.0, 0.0, 707.10678118654755, 0.0, 0.0, 707.10678118654755, 100.]
# pvrow
np.testing.assert_almost_equal(
pvarray.pvrows[2].back.get_param_weighted('direct'),
expected_dni_pvrow)
np.testing.assert_almost_equal(
pvarray.pvrows[1].back.list_segments[0]
.illum_collection.get_param_weighted('direct'), expected_dni_pvrow)
np.testing.assert_almost_equal(
pvarray.pvrows[1].back.list_segments[0]
.shaded_collection.get_param_weighted('direct'), 0.)
np.testing.assert_almost_equal(
pvarray.pvrows[0].front.get_param_weighted('direct'), 0.)
# ground
np.testing.assert_almost_equal(
pvarray.ground.list_segments[0]
.illum_collection.get_param_weighted('direct'), expected_dni_ground)
np.testing.assert_almost_equal(
pvarray.ground.list_segments[0]
.shaded_collection.get_param_weighted('direct'), 0.)
np.testing.assert_array_almost_equal(expected_irradiance_vec,
irradiance_vec)
# Check invrho_vec
expected_invrho_vec = np.array([5., 5., 5., 5., 5.,
5., 100., 33.333333, 33.333333, 100.,
33.333333, 33.333333, 100., 33.333333, 1.])
np.testing.assert_array_almost_equal(invrho_vec, expected_invrho_vec)
np.testing.assert_almost_equal(
pvarray.pvrows[0].front.get_param_weighted('rho'),
params_irr['rho_front_pvrow'])
np.testing.assert_almost_equal(
pvarray.pvrows[0].back.get_param_weighted('rho'),
params_irr['rho_back_pvrow'])
np.testing.assert_almost_equal(
pvarray.ground.get_param_weighted('rho'),
params_irr['rho_ground'])
# Check total perez vec
expected_total_perez_vec = [
522.618262, 522.618262, 522.618262, 522.618262, 522.618262,
100., 104.387248, 0., 0., 104.387248,
0., 0., 104.387248, 0., 100.]
np.testing.assert_array_almost_equal(total_perez_vec,
expected_total_perez_vec)
def test_hybridperez_ordered_front(params_irr):
# Apply irradiance model
DNI = 1000.
DHI = 100.
ts = dt.datetime(2019, 6, 14, 11)
irr_model = HybridPerezOrdered(horizon_band_angle=6.5)
irr_model.fit(ts, DNI, DHI,
params_irr['solar_zenith'],
params_irr['solar_azimuth'],
params_irr['surface_tilt'],
params_irr['surface_azimuth'],
params_irr['rho_ground'])
# Expected values
expected_dni_pvrow = DNI * cosd(45)
expected_dni_ground = DNI * cosd(65)
expected_circ_pvrow = 61.542748619313045
# FIXME: it doesn't seem right that circumsolar stronger on ground
expected_circ_ground = 36.782407037017585
expected_hor_pvrow_no_shad = 7.2486377533042452
expected_hor_pvrow_w_shad = 2.1452692285058985
horizon_shading_pct = 70.404518731426592
# Check fitting
np.testing.assert_almost_equal(irr_model.direct['ground_illum'][0],
expected_dni_ground)
np.testing.assert_almost_equal(irr_model.direct['front_illum_pvrow'][0],
expected_dni_pvrow)
assert irr_model.direct['back_illum_pvrow'][0] == 0.
# Create, fit, and transform pv array
pvarray = OrderedPVArray.fit_from_dict_of_scalars(
params_irr, param_names=IsotropicOrdered.params)
irr_model.transform(pvarray)
pvarray.transform(idx=0)
# there should be some direct shading
assert pvarray.pvrows[0].front.shaded_length
# Get modeling vectors
irradiance_vec, rho_vec, invrho_vec, total_perez_vec = \
irr_model.get_full_modeling_vectors(pvarray, 0)
# Test isotropic_luminance
np.testing.assert_almost_equal(irr_model.isotropic_luminance,
63.21759296)
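# (here the isotropic luminance works out to DHI minus the ground circumsolar
# component: 100 - 36.782 = 63.218 W/m2)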
# Check transform
expected_irradiance_vec = [
459.400669, 459.400669, 459.400669, 459.400669, 459.400669,
0., 775.898168, 7.248638, 7.248638, 775.898168,
7.248638, 2.145269, 775.898168, 2.145269, 63.217593]
# pvrow direct
np.testing.assert_almost_equal(
pvarray.pvrows[2].front.get_param_weighted('direct'),
expected_dni_pvrow)
np.testing.assert_almost_equal(
pvarray.pvrows[1].front.list_segments[0]
.illum_collection.get_param_weighted('direct'), expected_dni_pvrow)
np.testing.assert_almost_equal(
pvarray.pvrows[1].front.list_segments[0]
.shaded_collection.get_param_weighted('direct'), 0.)
np.testing.assert_almost_equal(
pvarray.pvrows[0].back.get_param_weighted('direct'), 0.)
# pvrow circumsolar
np.testing.assert_almost_equal(
pvarray.pvrows[2].front.get_param_weighted('circumsolar'),
expected_circ_pvrow)
np.testing.assert_almost_equal(
pvarray.pvrows[1].front.list_segments[0]
.illum_collection.get_param_weighted('circumsolar'),
expected_circ_pvrow)
np.testing.assert_almost_equal(
pvarray.pvrows[1].front.list_segments[0]
.shaded_collection.get_param_weighted('circumsolar'), 0.)
np.testing.assert_almost_equal(
pvarray.pvrows[1].back.list_segments[0]
.illum_collection.get_param_weighted('circumsolar'), 0.)
# pvrow horizon
np.testing.assert_almost_equal(
pvarray.pvrows[1].front.list_segments[0]
.illum_collection.get_param_weighted('horizon'),
expected_hor_pvrow_no_shad)
np.testing.assert_almost_equal(
pvarray.pvrows[1].front.list_segments[0]
.shaded_collection.get_param_weighted('horizon'),
expected_hor_pvrow_no_shad)
np.testing.assert_almost_equal(
pvarray.pvrows[0].back.list_segments[0]
.illum_collection.get_param_weighted('horizon'),
expected_hor_pvrow_no_shad)
np.testing.assert_almost_equal(
pvarray.pvrows[1].back.list_segments[0]
.illum_collection.get_param_weighted('horizon'),
expected_hor_pvrow_w_shad)
np.testing.assert_almost_equal(
pvarray.pvrows[1].back.list_segments[0]
.illum_collection.get_param_weighted('horizon_shd_pct'),
horizon_shading_pct)
# ground
np.testing.assert_almost_equal(
pvarray.ground.get_param_weighted('horizon'), 0.)
np.testing.assert_almost_equal(
pvarray.ground.list_segments[0]
.illum_collection.get_param_weighted('direct'), expected_dni_ground)
np.testing.assert_almost_equal(
pvarray.ground.list_segments[0]
.illum_collection.get_param_weighted('circumsolar'),
expected_circ_ground)
np.testing.assert_almost_equal(
pvarray.ground.list_segments[0]
.shaded_collection.get_param_weighted('direct'), 0.)
np.testing.assert_array_almost_equal(expected_irradiance_vec,
irradiance_vec)
# Check invrho_vec
expected_invrho_vec = np.array([
5., 5., 5., 5., 5.,
5., 100., 100., 33.333333, 100.,
100., 33.333333, 100., 33.333333, 1.])
np.testing.assert_array_almost_equal(invrho_vec, expected_invrho_vec)
np.testing.assert_almost_equal(
pvarray.pvrows[0].front.get_param_weighted('rho'),
params_irr['rho_front_pvrow'])
np.testing.assert_almost_equal(
pvarray.pvrows[0].back.get_param_weighted('rho'),
params_irr['rho_back_pvrow'])
np.testing.assert_almost_equal(
pvarray.ground.get_param_weighted('rho'),
params_irr['rho_ground'])
# Check total perez vec
expected_total_perez_vec = [
522.618262, 522.618262, 522.618262, 522.618262, 522.618262,
63.217593, 807.243186, 38.593656, 0., 807.243186,
38.593656, 0., 807.243186, 0., 63.217593]
np.testing.assert_array_almost_equal(total_perez_vec,
expected_total_perez_vec)
def test_hybridperez_ordered_back(params_irr):
params_irr.update({'surface_azimuth': 270,
'surface_tilt': 160})
# Apply irradiance model
DNI = 1000.
DHI = 100.
ts = dt.datetime(2019, 6, 14, 11)
irr_model = HybridPerezOrdered(horizon_band_angle=50)
irr_model.fit(ts, DNI, DHI,
params_irr['solar_zenith'],
params_irr['solar_azimuth'],
params_irr['surface_tilt'],
params_irr['surface_azimuth'],
params_irr['rho_ground'])
# Expected values
expected_dni_pvrow = DNI * cosd(45)
expected_dni_ground = DNI * cosd(65)
expected_circ_pvrow = 61.542748619313045
# FIXME: it doesn't seem right that circumsolar stronger on ground
expected_circ_ground = 36.782407037017585
expected_hor_pvrow_no_shad = 7.2486377533042452
expected_hor_pvrow_w_shad_1 = 6.0760257690033654
expected_hor_pvrow_w_shad_2 = 3.6101632102156898
horizon_shading_pct_1 = 16.176997998918541
horizon_shading_pct_2 = 50.195287265251757
# Check fitting
np.testing.assert_almost_equal(irr_model.direct['ground_illum'][0],
expected_dni_ground)
np.testing.assert_almost_equal(irr_model.direct['back_illum_pvrow'][0],
expected_dni_pvrow)
assert irr_model.direct['front_illum_pvrow'][0] == 0.
# Create, fit, and transform pv array
pvarray = OrderedPVArray.fit_from_dict_of_scalars(
params_irr, param_names=IsotropicOrdered.params)
irr_model.transform(pvarray)
pvarray.transform(idx=0)
# there should be some direct shading
assert pvarray.pvrows[0].back.shaded_length
# Get modeling vectors
irradiance_vec, rho_vec, invrho_vec, total_perez_vec = \
irr_model.get_full_modeling_vectors(pvarray, 0)
# Test isotropic_luminance
np.testing.assert_almost_equal(irr_model.isotropic_luminance,
63.21759296)
# Check transform
expected_irradiance_vec = [
459.400669, 459.400669, 459.400669, 459.400669, 459.400669,
0., 7.248638, 774.725556, 3.610163, 7.248638,
774.725556, 3.610163, 7.248638, 775.898168, 63.217593]
# pvrow direct
np.testing.assert_almost_equal(
pvarray.pvrows[2].back.get_param_weighted('direct'),
expected_dni_pvrow)
np.testing.assert_almost_equal(
pvarray.pvrows[1].back.list_segments[0]
.illum_collection.get_param_weighted('direct'), expected_dni_pvrow)
np.testing.assert_almost_equal(
pvarray.pvrows[1].back.list_segments[0]
.shaded_collection.get_param_weighted('direct'), 0.)
np.testing.assert_almost_equal(
pvarray.pvrows[0].front.get_param_weighted('direct'), 0.)
# pvrow circumsolar
np.testing.assert_almost_equal(
pvarray.pvrows[2].back.get_param_weighted('circumsolar'),
expected_circ_pvrow)
np.testing.assert_almost_equal(
pvarray.pvrows[1].back.list_segments[0]
.illum_collection.get_param_weighted('circumsolar'),
expected_circ_pvrow)
np.testing.assert_almost_equal(
pvarray.pvrows[1].back.list_segments[0]
.shaded_collection.get_param_weighted('circumsolar'), 0.)
np.testing.assert_almost_equal(
pvarray.pvrows[1].front.list_segments[0]
.illum_collection.get_param_weighted('circumsolar'), 0.)
# pvrow horizon
np.testing.assert_almost_equal(
pvarray.pvrows[1].front.get_param_weighted('horizon'),
expected_hor_pvrow_no_shad)
np.testing.assert_almost_equal(
pvarray.pvrows[1].back.list_segments[0]
.illum_collection.get_param_weighted('horizon'),
expected_hor_pvrow_w_shad_1)
np.testing.assert_almost_equal(
pvarray.pvrows[1].back.list_segments[0]
.shaded_collection.get_param_weighted('horizon'),
expected_hor_pvrow_w_shad_2)
np.testing.assert_almost_equal(
pvarray.pvrows[0].back.list_segments[0]
.illum_collection.get_param_weighted('horizon'),
expected_hor_pvrow_w_shad_1)
np.testing.assert_almost_equal(
pvarray.pvrows[0].back.list_segments[0]
.shaded_collection.get_param_weighted('horizon'),
expected_hor_pvrow_w_shad_2)
np.testing.assert_almost_equal(
pvarray.pvrows[1].back.list_segments[0]
.illum_collection.get_param_weighted('horizon_shd_pct'),
horizon_shading_pct_1)
np.testing.assert_almost_equal(
pvarray.pvrows[1].back.list_segments[0]
.shaded_collection.get_param_weighted('horizon_shd_pct'),
horizon_shading_pct_2)
# ground
np.testing.assert_almost_equal(
pvarray.ground.get_param_weighted('horizon'), 0.)
np.testing.assert_almost_equal(
pvarray.ground.list_segments[0]
.illum_collection.get_param_weighted('direct'), expected_dni_ground)
np.testing.assert_almost_equal(
pvarray.ground.list_segments[0]
.illum_collection.get_param_weighted('circumsolar'),
expected_circ_ground)
np.testing.assert_almost_equal(
pvarray.ground.list_segments[0]
.shaded_collection.get_param_weighted('direct'), 0.)
np.testing.assert_array_almost_equal(expected_irradiance_vec,
irradiance_vec)
# Check invrho_vec
expected_invrho_vec = np.array([5., 5., 5., 5., 5.,
5., 100., 33.333333, 33.333333, 100.,
33.333333, 33.333333, 100., 33.333333, 1.])
np.testing.assert_array_almost_equal(invrho_vec, expected_invrho_vec)
np.testing.assert_almost_equal(
pvarray.pvrows[0].front.get_param_weighted('rho'),
params_irr['rho_front_pvrow'])
np.testing.assert_almost_equal(
pvarray.pvrows[0].back.get_param_weighted('rho'),
params_irr['rho_back_pvrow'])
np.testing.assert_almost_equal(
pvarray.ground.get_param_weighted('rho'),
params_irr['rho_ground'])
# Check total perez vec
expected_total_perez_vec = [
522.618262, 522.618262, 522.618262, 522.618262, 522.618262,
63.217593, 104.387248, 0., 0., 104.387248,
0., 0., 104.387248, 0., 63.217593]
np.testing.assert_array_almost_equal(total_perez_vec,
expected_total_perez_vec)
def test_hybridperez_circ_shading():
"""Check that the function works and returns expected outputs"""
circumsolar_angle = 30.
circumsolar_model = 'uniform_disk'
irr_model = HybridPerezOrdered(circumsolar_angle=circumsolar_angle,
circumsolar_model=circumsolar_model)
surf = PVSurface(coords=[(0, -1), (0, 1)])
pvrows = [PVRow.from_linestring_coords([(1, -1), (1, 1)])]
solar_2d_vector = [1.2, 1] # <45 deg elevation so should have >50% shading
idx_neighbor = 0
circ_shading_pct = irr_model._calculate_circumsolar_shading_pct(
surf, idx_neighbor, pvrows, solar_2d_vector)
np.testing.assert_almost_equal(circ_shading_pct, 71.5969299216)
def test_hybridperez_horizon_shading_ts():
# Base params
params = {
'n_pvrows': 3,
'pvrow_height': 1,
'pvrow_width': 1,
'axis_azimuth': 0.,
'gcr': 0.3
}
# Timeseries inputs
df_inputs = pd.DataFrame({
'solar_zenith': [70., 80., 80., 70., 10.],
'solar_azimuth': [270., 90., 270., 90., 90.],
'surface_tilt': [20., 10., 20., 30., 0.],
'surface_azimuth': [270., 270., 90., 90., 90.]})
# Initialize and fit pv array
pvarray = OrderedPVArray.init_from_dict(params)
# Fit pv array to timeseries data
pvarray.fit(df_inputs.solar_zenith, df_inputs.solar_azimuth,
df_inputs.surface_tilt, df_inputs.surface_azimuth)
# irradiance model
model = HybridPerezOrdered(horizon_band_angle=15.)
pvrow_idx = 1
centroid_coords = (pvarray.ts_pvrows[pvrow_idx].back.list_segments[0]
.coords.centroid)
tilted_to_left = pvarray.rotation_vec > 0
horizon_pct_shading = model._calculate_horizon_shading_pct_ts(
pvarray.ts_pvrows, centroid_coords, pvrow_idx, tilted_to_left,
is_back_side=True)
# Check that values stay consistent
expected_pct_shading = np.array(
[17.163813, 8.667262, 17.163813, 25.317135, 0.])
np.testing.assert_allclose(expected_pct_shading, horizon_pct_shading)
def test_hybridperez_transform(df_inputs_clearsky_8760):
n_points = 24
df_inputs = df_inputs_clearsky_8760.iloc[:n_points, :]
# Base params
params = {
'n_pvrows': 3,
'pvrow_height': 1,
'pvrow_width': 1,
'axis_azimuth': 0.,
'gcr': 0.3
}
albedo = 0.2
# Initialize and fit pv array
pvarray = OrderedPVArray.init_from_dict(params)
# Fit pv array to timeseries data
pvarray.fit(df_inputs.solar_zenith, df_inputs.solar_azimuth,
df_inputs.surface_tilt, df_inputs.surface_azimuth)
# irradiance model
model = HybridPerezOrdered(horizon_band_angle=15.)
model.fit(df_inputs.index, df_inputs.dni.values, df_inputs.dhi.values,
df_inputs.solar_zenith.values, df_inputs.solar_azimuth.values,
df_inputs.surface_tilt.values, df_inputs.surface_azimuth.values,
albedo)
model.transform(pvarray)
# Check timeseries parameters
expected_middle_back_horizon = np.array(
[0., 0., 0., 0., 0., 0.,
0., 0.8244883, 4.43051118, 6.12136418, 6.03641816, 2.75109931,
3.15586037, 6.14709947, 6.02242241, 4.25283177, 0.58518296, 0.,
0., 0., 0., 0., 0., 0.])
np.testing.assert_allclose(
expected_middle_back_horizon,
pvarray.ts_pvrows[1].back.list_segments[0].illum.params['horizon'])
expected_ground_circ = np.array(
[0., 0., 0., 0., 0.,
0., 0., 2.19047189, 8.14152575, 13.9017384,
18.54394777, 21.11510529, 21.00554831, 18.24251837, 13.47583799,
7.66930532, 1.74693357, 0., 0., 0.,
0., 0., 0., 0.])
np.testing.assert_allclose(
expected_ground_circ,
pvarray.ts_ground.illum_params['circumsolar'])
np.testing.assert_allclose(
np.zeros(n_points),
pvarray.ts_ground.shaded_params['circumsolar'])
# Check at a given time idx
pvrow = pvarray.ts_pvrows[1].at(7)
np.testing.assert_allclose(
pvrow.back.list_segments[0].illum_collection
.get_param_weighted('horizon'),
expected_middle_back_horizon[7])
pvground = pvarray.ts_ground.at(7)
np.testing.assert_allclose(
pvground.list_segments[0].illum_collection
.get_param_weighted('circumsolar'),
expected_ground_circ[7])
def test_hybridperez_ordered_transparency_spacing_front(params_irr):
"""Check that module transparency and spacing params are applied
correctly in HybridPerezOrdered"""
# Apply irradiance model
DNI = 1000.
DHI = 100.
ts = dt.datetime(2019, 6, 14, 11)
irr_parameters = {'horizon_band_angle': 6.5,
'module_transparency': 0.1,
'module_spacing_ratio': 0.1}
irr_model = HybridPerezOrdered(**irr_parameters)
irr_model.fit(ts, DNI, DHI,
params_irr['solar_zenith'],
params_irr['solar_azimuth'],
params_irr['surface_tilt'],
params_irr['surface_azimuth'],
params_irr['rho_ground'])
# Create, fit, and transform pv array
pvarray = OrderedPVArray.fit_from_dict_of_scalars(
params_irr, param_names=IsotropicOrdered.params)
irr_model.transform(pvarray)
pvarray.transform(idx=0)
gnd_seg = pvarray.ground.list_segments[0]
pvrow_front = pvarray.pvrows[1].front
# check that front is shaded
assert pvrow_front.shaded_length > 0
    # Check transparency/spacing scaling on the ground surfaces
surf_gnd_shaded = gnd_seg.shaded_collection.list_surfaces[0]
surf_gnd_illum = gnd_seg.illum_collection.list_surfaces[0]
np.testing.assert_allclose(surf_gnd_illum.get_param('circumsolar') * 0.19,
surf_gnd_shaded.get_param('circumsolar'))
np.testing.assert_allclose(surf_gnd_illum.get_param('direct') * 0.19,
surf_gnd_shaded.get_param('direct'))
# Run check on pvrow surfaces
surf_pvrow_shaded = (pvrow_front.list_segments[0]
.shaded_collection.list_surfaces[0])
surf_pvrow_illum = (pvrow_front.list_segments[0]
.illum_collection.list_surfaces[0])
np.testing.assert_allclose(surf_pvrow_illum.get_param('direct') * 0.19,
surf_pvrow_shaded.get_param('direct'))
np.testing.assert_allclose(
surf_pvrow_illum.get_param('circumsolar') * 0.19,
surf_pvrow_shaded.get_param('circumsolar'))
def test_hybridperez_ordered_transparency_spacing_back(params_irr):
"""Check that module transparency and spacing params are applied
correctly in HybridPerezOrdered"""
params_irr.update({'surface_azimuth': 270,
'surface_tilt': 160})
# Apply irradiance model
DNI = 1000.
DHI = 100.
ts = dt.datetime(2019, 6, 14, 11)
irr_parameters = {'horizon_band_angle': 6.5,
'module_transparency': 0.1,
'module_spacing_ratio': 0.1}
irr_model = HybridPerezOrdered(**irr_parameters)
irr_model.fit(ts, DNI, DHI,
params_irr['solar_zenith'],
params_irr['solar_azimuth'],
params_irr['surface_tilt'],
params_irr['surface_azimuth'],
params_irr['rho_ground'])
# Create, fit, and transform pv array
pvarray = OrderedPVArray.fit_from_dict_of_scalars(
params_irr, param_names=IsotropicOrdered.params)
irr_model.transform(pvarray)
pvarray.transform(idx=0)
gnd_seg = pvarray.ground.list_segments[0]
pvrow_back = pvarray.pvrows[1].back
# check that back is shaded
assert pvrow_back.shaded_length > 0
# Run some checks on gnd surfaces
surf_gnd_shaded = gnd_seg.shaded_collection.list_surfaces[0]
surf_gnd_illum = gnd_seg.illum_collection.list_surfaces[0]
np.testing.assert_allclose(surf_gnd_illum.get_param('circumsolar') * 0.19,
surf_gnd_shaded.get_param('circumsolar'))
np.testing.assert_allclose(surf_gnd_illum.get_param('direct') * 0.19,
surf_gnd_shaded.get_param('direct'))
# Run check on pvrow surfaces
surf_pvrow_shaded = (pvrow_back.list_segments[0]
.shaded_collection.list_surfaces[0])
surf_pvrow_illum = (pvrow_back.list_segments[0]
.illum_collection.list_surfaces[0])
np.testing.assert_allclose(surf_pvrow_illum.get_param('direct') * 0.19,
surf_pvrow_shaded.get_param('direct'))
np.testing.assert_allclose(
surf_pvrow_illum.get_param('circumsolar') * 0.19,
surf_pvrow_shaded.get_param('circumsolar'))
def test_isotropic_ordered_transparency_spacing(params_irr):
"""Check that module transparency and spacing params are applied
correctly in IsotropicOrdered"""
# Apply irradiance model
DNI = 1000.
DHI = 100.
ts = dt.datetime(2019, 6, 14, 11)
irr_parameters = {'module_transparency': 0.1,
'module_spacing_ratio': 0.1}
irr_model = IsotropicOrdered(**irr_parameters)
irr_model.fit(ts, DNI, DHI,
params_irr['solar_zenith'],
params_irr['solar_azimuth'],
params_irr['surface_tilt'],
params_irr['surface_azimuth'],
params_irr['rho_ground'])
# Create, fit, and transform pv array
pvarray = OrderedPVArray.fit_from_dict_of_scalars(
params_irr, param_names=IsotropicOrdered.params)
irr_model.transform(pvarray)
pvarray.transform(idx=0)
gnd_seg = pvarray.ground.list_segments[0]
pvrow_front = pvarray.pvrows[1].front
# check that front is shaded
assert pvrow_front.shaded_length > 0
    # Check direct-irradiance scaling on the ground surfaces
surf_gnd_shaded = gnd_seg.shaded_collection.list_surfaces[0]
surf_gnd_illum = gnd_seg.illum_collection.list_surfaces[0]
np.testing.assert_allclose(surf_gnd_illum.get_param('direct') * 0.19,
surf_gnd_shaded.get_param('direct'))
# Run check on pvrow surfaces
surf_pvrow_shaded = (pvrow_front.list_segments[0]
.shaded_collection.list_surfaces[0])
surf_pvrow_illum = (pvrow_front.list_segments[0]
.illum_collection.list_surfaces[0])
np.testing.assert_allclose(surf_pvrow_illum.get_param('direct') * 0.19,
surf_pvrow_shaded.get_param('direct'))
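# Note: unlike the HybridPerezOrdered tests above, the isotropic sky model
# has no circumsolar component, which is why only the 'direct' parameter is
# checked against the transparency/spacing factor here.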
| 40.48294 | 79 | 0.663187 | 3,875 | 30,848 | 4.960516 | 0.074323 | 0.046353 | 0.077255 | 0.077567 | 0.861669 | 0.85449 | 0.843096 | 0.836073 | 0.830611 | 0.829154 | 0 | 0.087939 | 0.232138 | 30,848 | 761 | 80 | 40.536137 | 0.723561 | 0.075791 | 0 | 0.774958 | 0 | 0 | 0.067782 | 0 | 0 | 0 | 0 | 0.001314 | 0.186125 | 1 | 0.018613 | false | 0 | 0.011844 | 0 | 0.030457 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5a5fcd149ac66e702554cea5c21773e3ed769e68 | 29 | py | Python | python/ql/test/query-tests/Imports/cyclic-module/module6.py | vadi2/codeql | a806a4f08696d241ab295a286999251b56a6860c | [
"MIT"
] | 4,036 | 2020-04-29T00:09:57.000Z | 2022-03-31T14:16:38.000Z | python/ql/test/query-tests/Imports/cyclic-module/module6.py | vadi2/codeql | a806a4f08696d241ab295a286999251b56a6860c | [
"MIT"
] | 2,970 | 2020-04-28T17:24:18.000Z | 2022-03-31T22:40:46.000Z | python/ql/test/query-tests/Imports/cyclic-module/module6.py | ScriptBox99/github-codeql | 2ecf0d3264db8fb4904b2056964da469372a235c | [
"MIT"
] | 794 | 2020-04-29T00:28:25.000Z | 2022-03-30T08:21:46.000Z | def foo():
import module7 | 14.5 | 18 | 0.655172 | 4 | 29 | 4.75 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.045455 | 0.241379 | 29 | 2 | 18 | 14.5 | 0.818182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0.5 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
5a623688e5a0a1c0ec28abff20e7a06b229193d3 | 214 | py | Python | basicsr/metrics/__init__.py | jnjaby/DISCNet | 63b1859519091f8790afcc47e8c726cbefdcd0fe | [
"MIT"
] | 50 | 2021-04-20T14:38:44.000Z | 2022-03-30T03:14:41.000Z | basicsr/metrics/__init__.py | jnjaby/DISCNet | 63b1859519091f8790afcc47e8c726cbefdcd0fe | [
"MIT"
] | 14 | 2021-05-23T04:19:28.000Z | 2022-01-17T12:29:18.000Z | basicsr/metrics/__init__.py | jnjaby/DISCNet | 63b1859519091f8790afcc47e8c726cbefdcd0fe | [
"MIT"
] | 4 | 2021-08-14T03:28:20.000Z | 2022-03-03T00:52:58.000Z | from .niqe import calculate_niqe
from .psnr_ssim import calculate_psnr, calculate_ssim
from .lpips_metric import calculate_lpips
__all__ = ['calculate_psnr', 'calculate_ssim', 'calculate_niqe', 'calculate_lpips']
| 35.666667 | 83 | 0.82243 | 28 | 214 | 5.785714 | 0.321429 | 0.277778 | 0.271605 | 0.320988 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.093458 | 214 | 5 | 84 | 42.8 | 0.835052 | 0 | 0 | 0 | 0 | 0 | 0.266355 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
5a72d11e3dba7eb509c6a2db8d3dc449b7302f87 | 92 | py | Python | fury/ui/__init__.py | SunTzunami/fury | 39a28039fab8ba3070c0a7c1cdb1eed263f59971 | [
"BSD-3-Clause"
] | 149 | 2018-09-20T18:36:16.000Z | 2022-03-29T05:16:25.000Z | fury/ui/__init__.py | SunTzunami/fury | 39a28039fab8ba3070c0a7c1cdb1eed263f59971 | [
"BSD-3-Clause"
] | 523 | 2018-09-20T16:57:16.000Z | 2022-03-31T18:52:41.000Z | fury/ui/__init__.py | SunTzunami/fury | 39a28039fab8ba3070c0a7c1cdb1eed263f59971 | [
"BSD-3-Clause"
] | 150 | 2018-10-10T07:21:27.000Z | 2022-03-29T08:33:17.000Z |
from fury.ui.core import *
from fury.ui.containers import *
from fury.ui.elements import *
| 18.4 | 32 | 0.76087 | 15 | 92 | 4.666667 | 0.466667 | 0.342857 | 0.428571 | 0.457143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.141304 | 92 | 4 | 33 | 23 | 0.886076 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
5a88da876203d2bc84410ce49426ed32295ee961 | 42,636 | py | Python | pytests/gsi/index_config_stats_gsi.py | couchbaselabs/testrunner-bharath | 96af90070da2140cc11c549db7403f5ea3b76d34 | [
"Apache-2.0"
] | null | null | null | pytests/gsi/index_config_stats_gsi.py | couchbaselabs/testrunner-bharath | 96af90070da2140cc11c549db7403f5ea3b76d34 | [
"Apache-2.0"
] | null | null | null | pytests/gsi/index_config_stats_gsi.py | couchbaselabs/testrunner-bharath | 96af90070da2140cc11c549db7403f5ea3b76d34 | [
"Apache-2.0"
] | null | null | null | from .base_gsi import BaseSecondaryIndexingTests
from remote.remote_util import RemoteMachineShellConnection
from membase.api.rest_client import RestConnection, RestHelper
from pytests.query_tests_helper import QueryHelperTests
import time
class SecondaryIndexingStatsConfigTests(BaseSecondaryIndexingTests, QueryHelperTests):
def setUp(self):
super(SecondaryIndexingStatsConfigTests, self).setUp()
self.flush_bucket = self.input.param('flush_bucket', False)
self.move_index = self.input.param('move_index', False)
def suite_setUp(self):
pass
def tearDown(self):
super(SecondaryIndexingStatsConfigTests, self).tearDown()
def suite_tearDown(self):
pass
def test_key_size_distribution(self):
index_node = self.get_nodes_from_services_map(service_type="index",
get_all_nodes=False)
rest = RestConnection(index_node)
doc = {"indexer.statsPersistenceInterval": 60}
rest.set_index_settings_internal(doc)
string_70 = "x" * 70
string_260 = "x" * 260
string_1030 = "x" * 1030
string_5000 = "x" * 5000
string_103000 = "x" * 103000
insert_query1 = 'INSERT INTO default (KEY, VALUE) VALUES ("id1", { "name" : "%s" })' % string_70
insert_query2 = 'INSERT INTO default (KEY, VALUE) VALUES ("id2", { "name" : "%s" })' % string_260
insert_query3 = 'INSERT INTO default (KEY, VALUE) VALUES ("id3", { "name" : "%s" })' % string_1030
insert_query4 = 'INSERT INTO default (KEY, VALUE) VALUES ("id4", { "name" : "%s" })' % string_5000
insert_query5 = 'INSERT INTO default (KEY, VALUE) VALUES ("id5", { "name" : "%s" })' % string_103000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query3,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query4,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query5,
server=self.n1ql_node)
insert_query1 = 'INSERT INTO standard_bucket0 (KEY, VALUE) VALUES ("id4", { "name" : "%s" })' % string_5000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
create_index_query1 = "CREATE INDEX idx ON default(name) USING GSI"
create_index_query2 = "CREATE INDEX idx2 ON default(join_mo) USING GSI"
create_index_query3 = "CREATE INDEX idx ON standard_bucket0(name) USING GSI"
create_index_query4 = "CREATE INDEX idx2 ON standard_bucket0(join_mo) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query3,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query4,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
            self.fail(
                "index creation failed with error : {0}".format(
                    str(ex)))
expected_distr = []
expected_distr2 = []
common_distr = "{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
expected_distr.append("{u'(0-64)': 2016, u'(257-1024)': 1, u'(65-256)': 1, u'(4097-102400)': 1, u'(1025-4096)': 1, u'(102401-max)': 1}")
expected_distr.append(common_distr)
expected_distr2.append("{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 1, u'(1025-4096)': 0, u'(102401-max)': 0}")
expected_distr2.append(common_distr)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr)
self.verify_key_size(index_map, 'standard_bucket0', expected_distr2)
self.sleep(60)
shell = RemoteMachineShellConnection(index_node)
output1, error1 = shell.execute_command("killall -9 indexer")
self.sleep(30)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr)
self.verify_key_size(index_map, 'standard_bucket0', expected_distr2)
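    # The indexer reports key sizes in fixed buckets; a pure-Python stand-in
    # that matches the bucket labels in the expected strings above (bucket
    # edges are inferred from those labels, so treat them as assumptions):
    @staticmethod
    def _key_size_bucket(n_bytes):
        # e.g. 70 -> '(65-256)', 103000 -> '(102401-max)'
        for upper, label in [(64, '(0-64)'), (256, '(65-256)'),
                             (1024, '(257-1024)'), (4096, '(1025-4096)'),
                             (102400, '(4097-102400)')]:
            if n_bytes <= upper:
                return label
        return '(102401-max)'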
def test_key_size_distribution_nulls(self):
string_70 = "x" * 70
string_103000 = "x" * 103000
insert_query1 = 'INSERT INTO default (KEY, VALUE) VALUES ("id1", { "name" : "%s" })' % string_70
insert_query2 = 'INSERT INTO default (KEY, VALUE) VALUES ("id2", { "name" : NULL })'
insert_query3 = 'INSERT INTO default (KEY, VALUE) VALUES ("id3", { "name" : ""})'
insert_query5 = 'INSERT INTO default (KEY, VALUE) VALUES ("id5", { "name" : "%s" })' % string_103000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query3,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query5,
server=self.n1ql_node)
create_index_query1 = "CREATE INDEX idx ON default(name) USING GSI"
create_index_query2 = "CREATE INDEX idx2 ON default(join_mo) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query2,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
            self.fail(
                "index creation failed with error : {0}".format(
                    str(ex)))
expected_distr = []
common_distr = "{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
expected_distr.append("{u'(0-64)': 2018, u'(257-1024)': 0, u'(65-256)': 1, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 1}")
expected_distr.append(common_distr)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr)
def test_key_size_distribution_objects(self):
index_node = self.get_nodes_from_services_map(service_type="index",
get_all_nodes=False)
rest = RestConnection(index_node)
string_70 = "x" * 70
string_3000 = "x" * 3000
string_103000 = "x" * 103000
insert_query1 = 'INSERT INTO default (KEY, VALUE) VALUES ("id1", { "name" : "%s" })' % string_70
insert_query2 = 'INSERT INTO default (KEY, VALUE) VALUES ("id2", { "name" : {"name": "%s", "fake": "%s"} })' % (string_70, string_3000)
insert_query5 = 'INSERT INTO default (KEY, VALUE) VALUES ("id5", { "name" : "%s" })' % string_103000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query5,
server=self.n1ql_node)
create_index_query1 = "CREATE INDEX idx ON default(name) USING GSI"
create_index_query2 = "CREATE INDEX idx2 ON default(join_mo) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query2,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
            self.fail(
                "index creation failed with error : {0}".format(
                    str(ex)))
expected_distr = []
common_distr = "{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
expected_distr.append("{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 1, u'(4097-102400)': 0, u'(1025-4096)': 1, u'(102401-max)': 1}")
expected_distr.append(common_distr)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr)
if self.flush_bucket:
rest.flush_bucket("default")
self.sleep(30)
insert_query1 = 'INSERT INTO default (KEY, VALUE) VALUES ("id1", { "name" : "%s" })' % string_70
insert_query2 = 'INSERT INTO default (KEY, VALUE) VALUES ("id2", { "name" : {"name": "%s", "fake": "%s"} })' % (
string_70, string_3000)
insert_query5 = 'INSERT INTO default (KEY, VALUE) VALUES ("id5", { "name" : "%s" })' % string_103000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query5,
server=self.n1ql_node)
expected_distr = []
common_distr = "{u'(0-64)': 0, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
expected_distr.append(
"{u'(0-64)': 0, u'(257-1024)': 0, u'(65-256)': 1, u'(4097-102400)': 0, u'(1025-4096)': 1, u'(102401-max)': 1}")
expected_distr.append(common_distr)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr)
def test_key_size_distribution_dml(self):
create_index_query1 = "CREATE INDEX idx ON default(name) USING GSI"
create_index_query2 = "CREATE INDEX idx2 ON default(join_mo) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query2,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
            self.fail(
                "index creation failed with error : {0}".format(
                    str(ex)))
string_103000 = "x" * 103000
update_query = "UPDATE default SET name = '%s' WHERE name = 'employee-9'" % string_103000
self.n1ql_helper.run_cbq_query(query=update_query,
server=self.n1ql_node)
expected_distr = []
common_distr = "{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
expected_distr.append("{u'(0-64)': 2000, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 16}")
expected_distr.append(common_distr)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr)
delete_query = "delete from default where name = 'employee-6'"
self.n1ql_helper.run_cbq_query(query=delete_query,
server=self.n1ql_node)
expected_distr2 = []
common_distr = "{u'(0-64)': 1944, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
expected_distr2.append("{u'(0-64)': 1872, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 72}")
expected_distr2.append(common_distr)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr2)
def test_arrkey_size_distribution(self):
index_node = self.get_nodes_from_services_map(service_type="index",
get_all_nodes=False)
rest = RestConnection(index_node)
doc = {"indexer.statsPersistenceInterval": 60}
rest.set_index_settings_internal(doc)
string_70 = "x" * 70
string_260 = "x" * 260
string_1030 = "x" * 1030
string_5000 = "x" * 5000
string_103000 = "x" * 103000
insert_query1 = 'INSERT INTO default (KEY, VALUE) VALUES ("id1", { "name" : ["%s","",null] })' % string_70
insert_query2 = 'INSERT INTO default (KEY, VALUE) VALUES ("id2", { "name" : ["%s"] })' % string_260
insert_query3 = 'INSERT INTO default (KEY, VALUE) VALUES ("id3", { "name" : ["%s"] })' % string_1030
insert_query4 = 'INSERT INTO default (KEY, VALUE) VALUES ("id4", { "name" : ["%s","string1"] })' % string_5000
insert_query5 = 'INSERT INTO default (KEY, VALUE) VALUES ("id5", { "name" : ["%s", "string2"] })' % string_103000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query3,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query4,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query5,
server=self.n1ql_node)
insert_query1 = 'INSERT INTO standard_bucket0 (KEY, VALUE) VALUES ("id4", { "name" : ["%s"] })' % string_5000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
create_index_query1 = "CREATE INDEX idx ON default(distinct name) USING GSI"
create_index_query2 = "CREATE INDEX idx2 ON default(join_mo) USING GSI"
create_index_query3 = "CREATE INDEX idx ON standard_bucket0(distinct name) USING GSI"
create_index_query4 = "CREATE INDEX idx2 ON standard_bucket0(join_mo) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query3,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query4,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
            self.fail(
                "index creation failed with error : {0}".format(
                    str(ex)))
expected_distr = "{u'(0-64)': 2016, u'(257-1024)': 1, u'(65-256)': 1, u'(4097-102400)': 1, u'(1025-4096)': 1, u'(102401-max)': 1}"
expected_distr2 = "{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 1, u'(1025-4096)': 0, u'(102401-max)': 0}"
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_arrkey_size(index_map, 'default', expected_distr)
self.verify_arrkey_size(index_map, 'standard_bucket0', expected_distr2)
self.sleep(60)
shell = RemoteMachineShellConnection(index_node)
output1, error1 = shell.execute_command("killall -9 indexer")
self.sleep(30)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_arrkey_size(index_map, 'default', expected_distr)
self.verify_arrkey_size(index_map, 'standard_bucket0', expected_distr2)
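    # Note: arrkey_size_distribution appears to be reported only for array
    # (DISTINCT) indexes such as idx; plain indexes like idx2 omit it, which
    # is what verify_arrkey_size below relies on.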
def test_keysize_rebalance_out(self):
rest = RestConnection(self.master)
create_index_query1 = "CREATE INDEX idx ON default(name) USING GSI WITH {'nodes': ['%s:%s']}" % (self.servers[1].ip, self.servers[1].port)
create_index_query2 = "CREATE INDEX idx2 ON default(join_mo) USING GSI WITH {'nodes': ['%s:%s']}" % (self.servers[1].ip, self.servers[1].port)
create_index_query3 = "CREATE INDEX idx ON standard_bucket0(name) USING GSI"
create_index_query4 = "CREATE INDEX idx2 ON standard_bucket0(join_mo) USING GSI"
self.n1ql_helper.run_cbq_query(query=create_index_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query3,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query4,
server=self.n1ql_node)
string_70 = "x" * 70
string_260 = "x" * 260
string_1030 = "x" * 1030
string_5000 = "x" * 5000
string_103000 = "x" * 103000
insert_query1 = 'INSERT INTO default (KEY, VALUE) VALUES ("id1", { "name" : "%s" })' % string_70
insert_query2 = 'INSERT INTO default (KEY, VALUE) VALUES ("id2", { "name" : "%s" })' % string_260
insert_query3 = 'INSERT INTO default (KEY, VALUE) VALUES ("id3", { "name" : "%s" })' % string_1030
insert_query4 = 'INSERT INTO default (KEY, VALUE) VALUES ("id4", { "name" : "%s" })' % string_5000
insert_query5 = 'INSERT INTO default (KEY, VALUE) VALUES ("id5", { "name" : "%s" })' % string_103000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query3,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query4,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query5,
server=self.n1ql_node)
insert_query1 = 'INSERT INTO standard_bucket0 (KEY, VALUE) VALUES ("id4", { "name" : "%s" })' % string_5000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
expected_distr = []
expected_distr2 = []
common_distr = "{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
expected_distr.append("{u'(0-64)': 2016, u'(257-1024)': 1, u'(65-256)': 1, u'(4097-102400)': 1, u'(1025-4096)': 1, u'(102401-max)': 1}")
expected_distr.append(common_distr)
expected_distr2.append("{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 1, u'(1025-4096)': 0, u'(102401-max)': 0}")
expected_distr2.append(common_distr)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr)
self.verify_key_size(index_map, 'standard_bucket0', expected_distr2)
        # Rebalance out the node hosting the default-bucket indexes (idx, idx2)
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], [], [self.servers[1]])
reached = RestHelper(rest).rebalance_reached()
self.assertTrue(reached, "rebalance failed, stuck or did not complete")
rebalance.result()
self.sleep(30)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr)
self.verify_key_size(index_map, 'standard_bucket0', expected_distr2)
def test_keysize_rebalance_in(self):
rest = RestConnection(self.master)
if self.move_index:
create_index_query1 = "CREATE INDEX idx ON default(name) USING GSI"
self.n1ql_helper.run_cbq_query(query=create_index_query1,
server=self.n1ql_node)
create_index_query2 = "CREATE INDEX idx2 ON default(join_mo) USING GSI "
create_index_query3 = "CREATE INDEX idx ON standard_bucket0(name) USING GSI"
create_index_query4 = "CREATE INDEX idx2 ON standard_bucket0(join_mo) USING GSI"
self.n1ql_helper.run_cbq_query(query=create_index_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query3,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=create_index_query4,
server=self.n1ql_node)
string_70 = "x" * 70
string_260 = "x" * 260
string_1030 = "x" * 1030
string_5000 = "x" * 5000
string_103000 = "x" * 103000
insert_query1 = 'INSERT INTO default (KEY, VALUE) VALUES ("id1", { "name" : "%s" })' % string_70
insert_query2 = 'INSERT INTO default (KEY, VALUE) VALUES ("id2", { "name" : "%s" })' % string_260
insert_query3 = 'INSERT INTO default (KEY, VALUE) VALUES ("id3", { "name" : "%s" })' % string_1030
insert_query4 = 'INSERT INTO default (KEY, VALUE) VALUES ("id4", { "name" : "%s" })' % string_5000
insert_query5 = 'INSERT INTO default (KEY, VALUE) VALUES ("id5", { "name" : "%s" })' % string_103000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query2,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query3,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query4,
server=self.n1ql_node)
self.n1ql_helper.run_cbq_query(query=insert_query5,
server=self.n1ql_node)
insert_query1 = 'INSERT INTO standard_bucket0 (KEY, VALUE) VALUES ("id4", { "name" : "%s" })' % string_5000
self.n1ql_helper.run_cbq_query(query=insert_query1,
server=self.n1ql_node)
expected_distr = []
expected_distr2 = []
common_distr = "{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 0, u'(1025-4096)': 0, u'(102401-max)': 0}"
expected_distr.append("{u'(0-64)': 2016, u'(257-1024)': 1, u'(65-256)': 1, u'(4097-102400)': 1, u'(1025-4096)': 1, u'(102401-max)': 1}")
expected_distr.append(common_distr)
expected_distr2.append("{u'(0-64)': 2016, u'(257-1024)': 0, u'(65-256)': 0, u'(4097-102400)': 1, u'(1025-4096)': 0, u'(102401-max)': 0}")
expected_distr2.append(common_distr)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr)
self.verify_key_size(index_map, 'standard_bucket0', expected_distr2)
services_in = ["index"]
rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], [self.servers[self.nodes_init]], [],
services=services_in)
reached = RestHelper(rest).rebalance_reached()
self.assertTrue(reached, "rebalance failed, stuck or did not complete")
rebalance.result()
self.sleep(30)
if not self.move_index:
create_index_query1 = "CREATE INDEX idx ON default(name) USING GSI"
self.n1ql_helper.run_cbq_query(query=create_index_query1, server=self.n1ql_node)
else:
alter_index_query = 'ALTER INDEX default.idx WITH {{"action":"move","nodes": ["{0}:{1}"]}}'.format(self.servers[self.nodes_init].ip, self.servers[self.nodes_init].port)
self.n1ql_helper.run_cbq_query(query=alter_index_query, server=self.n1ql_node)
self.sleep(20)
index_map = self.get_index_stats()
self.log.info(index_map)
self.verify_key_size(index_map, 'default', expected_distr)
self.verify_key_size(index_map, 'standard_bucket0', expected_distr2)
def verify_key_size(self, index_map, bucket, expected_distr):
for index in index_map[bucket]:
if index == 'idx':
self.log.info(index_map[bucket][index]['key_size_distribution'])
self.assertTrue(str(index_map[bucket][index]['key_size_distribution']) == expected_distr[0])
else:
self.log.info(index_map[bucket][index]['key_size_distribution'])
self.assertTrue(str(index_map[bucket][index]['key_size_distribution']) == expected_distr[1])
def verify_arrkey_size(self, index_map, bucket, expected_distr):
for index in index_map[bucket]:
if index == 'idx':
self.log.info(index_map[bucket][index]['arrkey_size_distribution'])
self.assertTrue(str(index_map[bucket][index]['arrkey_size_distribution']) == expected_distr)
else:
self.assertTrue("arrkey_size_distribution" not in str(index_map[bucket][index]))
def test_num_scan_timeouts(self):
index_node = self.get_nodes_from_services_map(service_type="index",
get_all_nodes=False)
rest = RestConnection(index_node)
shell = RemoteMachineShellConnection(self.master)
create_index_query = "CREATE INDEX idx ON default(age) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
            self.fail(
                "index creation failed with error : {0}".format(
                    str(ex)))
shell.execute_cbworkloadgen(rest.username, rest.password, 1000000, 70, 'default', 1024, '-j')
doc = {"indexer.settings.scan_timeout": 10}
rest.set_index_settings(doc)
query_params = {'scan_consistency': 'request_plus'}
select_query = "SELECT age from default"
self.n1ql_helper.run_cbq_query(query=select_query, server=self.n1ql_node, query_params=query_params)
index_map = self.get_index_stats()
official_stats = rest.get_index_official_stats()
self.log.info(index_map)
self.log.info(official_stats)
def test_avg_scan_latency(self):
index_node = self.get_nodes_from_services_map(service_type="index",
get_all_nodes=False)
rest = RestConnection(index_node)
create_index_query = "CREATE INDEX idx ON default(name) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
if self.expected_err_msg not in str(ex):
self.fail(
"index creation did not fail with expected error : {0}".format(
str(ex)))
else:
self.log.info("Index creation failed as expected")
select_query = "SELECT count(name) from default"
# Run select query 10 times
        for _ in range(10):
self.n1ql_helper.run_cbq_query(query=select_query,
server=self.n1ql_node)
index_map = self.get_index_stats()
official_stats = rest.get_index_official_stats()
self.log.info(index_map)
self.log.info(official_stats)
self.assertTrue(index_map['default']['idx']['avg_scan_latency'] == official_stats['default:idx']['avg_scan_latency'])
def test_initial_build_progress(self):
index_node = self.get_nodes_from_services_map(service_type="index",
get_all_nodes=False)
rest = RestConnection(index_node)
create_index_query = "CREATE INDEX idx ON default(name) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
            self.fail(
                "index creation failed with error : {0}".format(
                    str(ex)))
        init_time = time.time()
        check = False
        while not check:
            index_status = rest.get_index_official_stats()
            self.log.info(index_status)
            if index_status['default:idx']['initial_build_progress'] == 100:
                check = True
            else:
                # Poll once per second, giving up after 60 seconds
                time.sleep(1)
                check = time.time() - init_time > 60
official_stats = rest.get_index_official_stats()
self.log.info(official_stats)
self.assertTrue(official_stats['default:idx']['initial_build_progress'] == 100)
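    # The polling loop above can be factored into a reusable helper; a
    # minimal sketch under the same 60 s timeout / 1 s interval assumptions:
    def _wait_until(self, predicate, timeout=60, interval=1):
        deadline = time.time() + timeout
        while time.time() < deadline:
            if predicate():
                return True
            time.sleep(interval)
        return False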
def test_num_items_flushed(self):
index_node = self.get_nodes_from_services_map(service_type="index",
get_all_nodes=False)
rest = RestConnection(index_node)
create_index_query = "CREATE INDEX idx ON default(age) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
            self.fail(
                "index creation failed with error : {0}".format(
                    str(ex)))
official_stats = rest.get_index_official_stats()
self.log.info(official_stats)
self.assertTrue(official_stats['default:idx']['num_items_flushed'] == self.docs_per_day*2016)
def test_avg_drain_rate(self):
index_node = self.get_nodes_from_services_map(service_type="index",
get_all_nodes=False)
rest = RestConnection(index_node)
shell = RemoteMachineShellConnection(self.master)
create_index_query = "CREATE INDEX idx ON default(age) USING GSI"
try:
self.n1ql_helper.run_cbq_query(query=create_index_query,
server=self.n1ql_node)
except Exception as ex:
self.log.info(str(ex))
            self.fail(
                "index creation failed with error : {0}".format(
                    str(ex)))
shell.execute_cbworkloadgen(rest.username, rest.password, 500000, 70, 'default', 1024, '-j')
official_stats = rest.get_index_official_stats()
index_map = self.get_index_stats()
self.log.info(index_map)
self.log.info(official_stats)
self.assertTrue(index_map['default']['idx']['avg_drain_rate'] == official_stats['default:idx']['avg_drain_rate'])
def test_index_stats(self):
"""
Tests index stats when indexes are created and dropped
"""
#Create Index
self.run_multi_operations(buckets = self.buckets,
query_definitions = self.query_definitions,
create_index = True, drop_index = False)
#Check Index Stats
self.sleep(30)
index_map = self.get_index_stats()
self.log.info(index_map)
for query_definition in self.query_definitions:
index_name = query_definition.index_name
for bucket in self.buckets:
bucket_name = bucket.name
                check_keys = ['items_count', 'total_scan_duration',
                              'num_docs_queued', 'num_requests',
                              'num_rows_returned', 'num_docs_pending',
                              'delete_bytes']
map = self._create_stats_map(items_count=2016)
self._verify_index_stats(index_map, index_name, bucket_name, map, check_keys)
def test_index_storage_stats(self):
indexer_nodes = self.get_nodes_from_services_map(service_type="index",
get_all_nodes=True)
self.run_multi_operations(buckets = self.buckets,
query_definitions = self.query_definitions,
create_index = True, drop_index = False)
for node in indexer_nodes:
indexer_rest = RestConnection(node)
content = indexer_rest.get_index_storage_stats()
for index in list(content.values()):
for stats in list(index.values()):
self.log.info("MainStore Stats - {0}: {1}".format(
index, stats["MainStore"]))
self.log.info("BackStore Stats - {0}: {1}".format(
index, stats["BackStore"]))
self.assertEqual(stats["MainStore"]["resident_ratio"], 1.00,
"Resident ratio not 1")
def test_indexer_logs_for_leaked_password(self):
expected_msg = "http://%40index-cbauth@127.0.0.1:8091"
indexers = self.get_nodes_from_services_map(service_type="index", get_all_nodes=True)
self.assertGreater(len(indexers), 0, "No indexer found in cluster")
for server in indexers:
shell = RemoteMachineShellConnection(server)
_, dir = RestConnection(server).diag_eval('filename:absname('
'element(2, '
'application:get_env('
'ns_server, error_logger_mf_dir))).')
indexer_log = str(dir) + '/indexer.log*'
count, err = shell.execute_command("zgrep \"{0}\" {1} | wc -l".
format(expected_msg, indexer_log))
if isinstance(count, list):
count = int(count[0])
else:
count = int(count)
shell.disconnect()
self.assertGreater(count, 0, "Password leak found in Indexer {0}".format(server.ip))
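    # The zgrep pipeline above has a pure-Python equivalent for cases where
    # shell access is unavailable; a hedged sketch (the glob pattern and
    # marker are illustrative):
    def _count_marker_in_logs(self, pattern, marker):
        import glob
        import gzip
        total = 0
        for path in glob.glob(pattern):
            opener = gzip.open if path.endswith('.gz') else open
            with opener(path, 'rt', errors='replace') as log:
                total += sum(line.count(marker) for line in log)
        return total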
def test_get_index_settings(self):
#Check Index Settings
map = self.get_index_settings()
for node in list(map.keys()):
val = map[node]
gen = self._create_settings_map()
for key in list(gen.keys()):
self.assertTrue(key in list(val.keys()), "{0} not in {1} ".format(key, val))
def test_set_index_settings(self):
#Check Index Settings
map1 = self._set_settings_map()
self.log.info(map1)
self.set_index_settings(map1)
map = self.get_index_settings()
for node in list(map.keys()):
val = map[node]
for key in list(map1.keys()):
self.assertTrue(key in list(val.keys()), "{0} not in {1} ".format(key, val))
def _verify_index_stats(self, index_map, index_name, bucket_name, index_stat_values, check_keys=None):
self.assertIn(bucket_name, list(index_map.keys()), "bucket name {0} not present in stats".format(bucket_name))
self.assertIn(index_name, list(index_map[bucket_name].keys()),
"index name {0} not present in set of indexes {1}".format(index_name,
list(index_map[bucket_name].keys())))
for key in list(index_stat_values.keys()):
self.assertIn(key, list(index_map[bucket_name][index_name].keys()),
"stats {0} not present in Index stats {1}".format(key,
index_map[bucket_name][index_name]))
if check_keys:
if key in check_keys:
self.assertEqual(str(index_map[bucket_name][index_name][key]), str(index_stat_values[key]),
" for key {0} : {1} != {2}".format(key,
index_map[bucket_name][index_name][key],
index_stat_values[key]))
else:
self.assertEqual(str(index_stat_values[key]), str(index_map[bucket_name][index_name][key]),
" for key {0} : {1} != {2}".format(key,
index_map[bucket_name][index_name][key],
index_stat_values[key]))
def set_index_settings(self, settings):
servers = self.get_nodes_from_services_map(service_type="index", get_all_nodes=True)
for server in servers:
RestConnection(server).set_index_settings(settings)
def get_index_settings(self):
servers = self.get_nodes_from_services_map(service_type="index", get_all_nodes=True)
index_settings_map = {}
for server in servers:
key = "{0}:{1}".format(server.ip, server.port)
index_settings_map[key] = RestConnection(server).get_index_settings()
return index_settings_map
def _create_stats_map(self, items_count = 0, total_scan_duration = 0,
delete_bytes = 0, scan_wait_duration = 0, insert_bytes = 0,
num_rows_returned = 0, num_docs_indexed = 0, num_docs_pending = 0,
scan_bytes_read = 0, get_bytes = 0, num_docs_queued = 0,num_requests = 0,
disk_size = 0):
        map = {
            'items_count': items_count,
            'disk_size': disk_size,
            'total_scan_duration': total_scan_duration,
            'delete_bytes': delete_bytes,
            'scan_wait_duration': scan_wait_duration,
            'insert_bytes': insert_bytes,
            'num_rows_returned': num_rows_returned,
            'num_docs_indexed': num_docs_indexed,
            'num_docs_pending': num_docs_pending,
            'scan_bytes_read': scan_bytes_read,
            'get_bytes': get_bytes,
            'num_docs_queued': num_docs_queued,
            'num_requests': num_requests,
        }
        return map
def _create_settings_map(self):
map = { "indexer.settings.recovery.max_rollbacks" : 5,
"indexer.settings.bufferPoolBlockSize" : 16384,
"indexer.settings.max_cpu_percent" : 400,
"queryport.client.settings.poolOverflow" : 30,
"indexer.settings.memProfile" : False,
"indexer.settings.statsLogDumpInterval" : 60,
"indexer.settings.persisted_snapshot.interval" : 5000,
"indexer.settings.inmemory_snapshot.interval" : 200,
"indexer.settings.compaction.check_period" : 30,
"indexer.settings.largeSnapshotThreshold" : 200,
"indexer.settings.log_level" : "debug",
"indexer.settings.scan_timeout" : 120000,
"indexer.settings.maxVbQueueLength" : 0,
"indexer.settings.send_buffer_size" : 1024,
"indexer.settings.compaction.min_size" : 1048576,
"indexer.settings.cpuProfDir" : "",
"indexer.settings.memory_quota" : 268435456,
"indexer.settings.memProfDir" : "",
"projector.settings.log_level" : "debug",
"queryport.client.settings.poolSize" : 1000,
"indexer.settings.max_writer_lock_prob" : 20,
"indexer.settings.compaction.interval" : "00:00,00:00",
"indexer.settings.cpuProfile" : False,
"indexer.settings.compaction.min_frag" : 30,
"indexer.settings.sliceBufSize" : 50000,
"indexer.settings.wal_size" : 4096,
"indexer.settings.fast_flush_mode" : True,
"indexer.settings.smallSnapshotThreshold" : 30,
"indexer.settings.persisted_snapshot_init_build.interval": 5000
}
return map
def _set_settings_map(self):
map = { "indexer.settings.recovery.max_rollbacks" : 4,
"indexer.settings.bufferPoolBlockSize" : 16384,
"indexer.settings.max_cpu_percent" : 400,
"indexer.settings.memProfile" : False,
"indexer.settings.statsLogDumpInterval" : 60,
"indexer.settings.persisted_snapshot.interval" : 5000,
"indexer.settings.inmemory_snapshot.interval" : 200,
"indexer.settings.compaction.check_period" : 31,
"indexer.settings.largeSnapshotThreshold" : 200,
"indexer.settings.log_level" : "debug",
"indexer.settings.scan_timeout" : 120000,
"indexer.settings.maxVbQueueLength" : 0,
"indexer.settings.send_buffer_size" : 1024,
"indexer.settings.compaction.min_size" : 1048576,
"indexer.settings.cpuProfDir" : "",
"indexer.settings.memory_quota" : 268435456,
"indexer.settings.memProfDir" : "",
"indexer.settings.persisted_snapshot_init_build.interval": 5000,
"indexer.settings.max_writer_lock_prob" : 20,
"indexer.settings.compaction.interval" : "00:00,00:00",
"indexer.settings.cpuProfile" : False,
"indexer.settings.compaction.min_frag" : 31,
"indexer.settings.sliceBufSize" : 50000,
"indexer.settings.wal_size" : 4096,
"indexer.settings.fast_flush_mode" : True,
"indexer.settings.smallSnapshotThreshold" : 30,
"projector.settings.log_level" : "debug",
"queryport.client.settings.poolSize" : 1000,
"queryport.client.settings.poolOverflow" : 30
}
return map
| 50.16 | 180 | 0.587274 | 5,110 | 42,636 | 4.644031 | 0.069472 | 0.045173 | 0.039526 | 0.047996 | 0.826261 | 0.805276 | 0.788504 | 0.781973 | 0.770385 | 0.760229 | 0 | 0.066711 | 0.295079 | 42,636 | 849 | 181 | 50.219081 | 0.722875 | 0.005207 | 0 | 0.710227 | 0 | 0.075284 | 0.252454 | 0.059433 | 0 | 0 | 0 | 0 | 0.028409 | 1 | 0.041193 | false | 0.008523 | 0.007102 | 0 | 0.055398 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ce803ec77d7fc6b9ae243cd4eef67d41ac2c8927 | 18,401 | py | Python | server/apps/verticals/shipping/tests/test_trip_info_api.py | iotile/iotile_cloud | 9dc65ac86d3a730bba42108ed7d9bbb963d22ba6 | [
"MIT"
] | null | null | null | server/apps/verticals/shipping/tests/test_trip_info_api.py | iotile/iotile_cloud | 9dc65ac86d3a730bba42108ed7d9bbb963d22ba6 | [
"MIT"
] | null | null | null | server/apps/verticals/shipping/tests/test_trip_info_api.py | iotile/iotile_cloud | 9dc65ac86d3a730bba42108ed7d9bbb963d22ba6 | [
"MIT"
] | null | null | null | import datetime
import json
from django.contrib.auth import get_user_model
from rest_framework import status
from rest_framework.reverse import reverse
from rest_framework.test import APITestCase
from apps.datablock.models import DataBlock
from apps.org.models import Org
from apps.project.models import Project
from apps.streamdata.models import StreamData
from apps.streamdata.serializers import StreamDataSerializer
from apps.streamevent.models import StreamEventData
from apps.utils.data_mask.mask_utils import set_data_mask
from apps.utils.test_util import TestMixin
from apps.utils.timezone_utils import formatted_ts
from apps.utils.utest.devices import TripDeviceMock
user_model = get_user_model()
class APITripInfoTestCase(TestMixin, APITestCase):
def setUp(self):
self.usersTestSetup()
self.device_mock = TripDeviceMock()
self.o2 = Org.objects.get(slug='user-org')
self.o2.register_user(self.u2, role='a1')
self.p1 = Project.objects.get(name='Project 1')
self.pd1 = self.p1.devices.first()
def tearDown(self):
self.device_mock.tearDown()
self.userTestTearDown()
def testMock(self):
self.device_mock.testMock(self)
def test_inactive_trip(self):
"""
Test API for Inactive Trip
"""
url = reverse('shipping-trip-detail', kwargs={'slug': self.pd1.slug})
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
ok = self.client.login(email='user1@foo.com', password='pass')
self.assertTrue(ok)
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
deserialized = json.loads(resp.content.decode())
self.assertEqual(deserialized['slug'], self.pd1.slug)
self.assertEqual(deserialized['label'], self.pd1.label)
self.assertEqual(deserialized['state'], 'N0')
self.assertIsNone(deserialized['data_mask'])
self.client.logout()
ok = self.client.login(email='user2@foo.com', password='pass')
self.assertTrue(ok)
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.client.logout()
ok = self.client.login(email='user3@foo.com', password='pass')
self.assertTrue(ok)
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
self.pd1.org.register_user(self.u3, role='m1')
self.assertTrue(self.pd1.org.has_permission(self.u3, 'can_modify_device'))
self.assertFalse(self.pd1.org.has_permission(self.u3, 'can_manage_org_and_projects'))
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.pd1.org.de_register_user(self.u3, delete_obj=True)
self.pd1.org.register_user(self.u3, role='r1')
self.assertFalse(self.pd1.org.has_permission(self.u3, 'can_modify_device'))
self.assertFalse(self.pd1.org.has_permission(self.u3, 'can_manage_org_and_projects'))
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.client.logout()
def test_active_trip(self):
"""
Test API for Active Trip
"""
setup_url = reverse('shipping-trip-setup', kwargs={'slug': self.pd1.slug})
url = reverse('shipping-trip-detail', kwargs={'slug': self.pd1.slug})
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
ok = self.client.login(email='user1@foo.com', password='pass')
self.assertTrue(ok)
# Trip Setup
self.pd1.state = 'N0'
self.pd1.save()
resp = self.client.post(setup_url, format='json', data={})
self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.pd1 = self.p1.devices.first()
deserialized = json.loads(resp.content.decode())
self.assertEqual(deserialized['slug'], self.pd1.slug)
self.client.logout()
ok = self.client.login(email='user2@foo.com', password='pass')
self.assertTrue(ok)
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.client.logout()
ok = self.client.login(email='user3@foo.com', password='pass')
self.assertTrue(ok)
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
self.pd1.org.register_user(self.u3, role='m1')
self.assertTrue(self.pd1.org.has_permission(self.u3, 'can_modify_device'))
self.assertFalse(self.pd1.org.has_permission(self.u3, 'can_manage_org_and_projects'))
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.pd1.org.de_register_user(self.u3, delete_obj=True)
self.pd1.org.register_user(self.u3, role='r1')
self.assertFalse(self.pd1.org.has_permission(self.u3, 'can_modify_device'))
self.assertFalse(self.pd1.org.has_permission(self.u3, 'can_manage_org_and_projects'))
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
self.client.logout()
def test_trip_mask(self):
"""
Test API for Active Trip
"""
setup_url = reverse('shipping-trip-setup', kwargs={'slug': self.pd1.slug})
url = reverse('shipping-trip-detail', kwargs={'slug': self.pd1.slug})
ok = self.client.login(email='user1@foo.com', password='pass')
self.assertTrue(ok)
# Trip Setup
self.pd1.state = 'N0'
self.pd1.save()
resp = self.client.post(setup_url, format='json', data={})
self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
self.pd1 = self.p1.devices.first()
start = StreamData.objects.get(stream_slug=self.pd1.get_stream_slug_for('0e00'))
end = StreamData.objects.get(stream_slug=self.pd1.get_stream_slug_for('0e01'))
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
deserialized = json.loads(resp.content.decode())
self.assertEqual(deserialized['slug'], self.pd1.slug)
self.assertEqual(deserialized['label'], self.pd1.label)
self.assertEqual(deserialized['state'], 'N1')
        # There should be no mask on the data
self.assertIsNone(deserialized['data_mask'])
self.assertFalse(deserialized['trip_date_range']['masked'])
# Original start / end and actual start / end should be equal
self.assertEqual(deserialized['trip_date_range']['original_start'], formatted_ts(start.timestamp))
self.assertEqual(deserialized['trip_date_range']['original_end'], formatted_ts(end.timestamp))
self.assertEqual(deserialized['trip_date_range']['actual_start'], formatted_ts(start.timestamp))
self.assertEqual(deserialized['trip_date_range']['actual_end'], formatted_ts(end.timestamp))
# Start_data and end_data should hold data of TripStart and TripEnd events
self.assertEqual(deserialized['trip_date_range']['start_data'], StreamDataSerializer(start).data)
self.assertEqual(deserialized['trip_date_range']['end_data'], StreamDataSerializer(end).data)
# Should be 5 streams
self.assertEqual(len(deserialized['streams']), 5)
# Valid mask on start date and no mask on end date
mask_start = start.timestamp + datetime.timedelta(seconds=60)
set_data_mask(self.pd1, formatted_ts(mask_start), None, [], [], self.u1)
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
deserialized = json.loads(resp.content.decode())
self.assertEqual(deserialized['slug'], self.pd1.slug)
self.assertEqual(deserialized['label'], self.pd1.label)
self.assertEqual(deserialized['state'], 'N1')
self.assertIsNotNone(deserialized['data_mask'])
self.assertEqual(deserialized['data_mask']['start'], formatted_ts(mask_start))
self.assertIsNone(deserialized['data_mask']['end'])
self.assertTrue(deserialized['trip_date_range']['masked'])
self.assertEqual(deserialized['trip_date_range']['original_start'], formatted_ts(start.timestamp))
self.assertEqual(deserialized['trip_date_range']['original_end'], formatted_ts(end.timestamp))
self.assertEqual(deserialized['trip_date_range']['actual_start'], formatted_ts(mask_start))
self.assertEqual(deserialized['trip_date_range']['actual_end'], formatted_ts(end.timestamp))
# Invalid mask on start date (older than actual start) and valid end mask
mask_start = start.timestamp - datetime.timedelta(seconds=60)
mask_end = end.timestamp - datetime.timedelta(seconds=60)
set_data_mask(self.pd1, formatted_ts(mask_start), formatted_ts(mask_end), [], [], self.u1)
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
deserialized = json.loads(resp.content.decode())
self.assertEqual(deserialized['slug'], self.pd1.slug)
self.assertEqual(deserialized['label'], self.pd1.label)
self.assertEqual(deserialized['state'], 'N1')
self.assertTrue(deserialized['trip_date_range']['masked'])
self.assertEqual(deserialized['trip_date_range']['original_start'], formatted_ts(start.timestamp))
self.assertEqual(deserialized['trip_date_range']['original_end'], formatted_ts(end.timestamp))
self.assertEqual(deserialized['trip_date_range']['actual_start'], formatted_ts(start.timestamp))
self.assertEqual(deserialized['trip_date_range']['actual_end'], formatted_ts(mask_end))
self.assertIsNotNone(deserialized['data_mask'])
self.assertEqual(deserialized['data_mask']['start'], formatted_ts(mask_start))
self.assertEqual(deserialized['data_mask']['end'], formatted_ts(mask_end))
self.client.logout()
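    # The assertions above pin down the mask-clamping rule: the effective
    # trip range is the intersection of the recorded range and the mask, so
    # a mask start older than the real start is ignored. A minimal sketch of
    # that rule (names are illustrative, not the server implementation):
    @staticmethod
    def _apply_mask(start, end, mask_start=None, mask_end=None):
        actual_start = max(start, mask_start) if mask_start is not None else start
        actual_end = min(end, mask_end) if mask_end is not None else end
        return actual_start, actual_end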
def test_trip_archive(self):
"""
Test API for Active Trip
"""
setup_url = reverse('shipping-trip-setup', kwargs={'slug': self.pd1.slug})
archive_url = reverse('shipping-trip-archive', kwargs={'slug': self.pd1.slug})
archive_payload = {
'title': 'This is a new archive',
}
ok = self.client.login(email='user1@foo.com', password='pass')
self.assertTrue(ok)
# Trip Setup
self.pd1.state = 'N0'
self.pd1.save()
resp = self.client.post(setup_url, format='json', data={})
self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
# Trip Archive
resp = self.client.post(archive_url, format='json', data=archive_payload)
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
deserialized = json.loads(resp.content.decode())
block_slug = deserialized['slug']
block = DataBlock.objects.get(slug=block_slug)
url = reverse('shipping-trip-detail', kwargs={'slug': block_slug})
start = StreamData.objects.get(stream_slug=block.get_stream_slug_for('0e00'))
end = StreamData.objects.get(stream_slug=block.get_stream_slug_for('0e01'))
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
deserialized = json.loads(resp.content.decode())
self.assertEqual(deserialized['slug'], block.slug)
self.assertEqual(deserialized['label'], block.title)
self.assertEqual(deserialized['state'], 'A')
self.assertIsNone(deserialized['data_mask'])
self.assertEqual(deserialized['trip_date_range']['original_start'], formatted_ts(start.timestamp))
self.assertEqual(deserialized['trip_date_range']['original_end'], formatted_ts(end.timestamp))
self.assertEqual(deserialized['trip_date_range']['actual_start'], formatted_ts(start.timestamp))
self.assertEqual(deserialized['trip_date_range']['actual_end'], formatted_ts(end.timestamp))
self.assertFalse(deserialized['trip_date_range']['masked'])
self.assertEqual(deserialized['trip_date_range']['start_data'], StreamDataSerializer(start).data)
self.assertEqual(deserialized['trip_date_range']['end_data'], StreamDataSerializer(end).data)
self.assertEqual(len(deserialized['streams']), 5)
self.client.logout()
def test_trip_info_saver(self):
"""
To test a saver, remove the start/end signals
"""
setup_url = reverse('shipping-trip-setup', kwargs={'slug': self.pd1.slug})
archive_url = reverse('shipping-trip-archive', kwargs={'slug': self.pd1.slug})
archive_payload = {
'title': 'This is a new archive',
}
ok = self.client.login(email='user1@foo.com', password='pass')
self.assertTrue(ok)
# Trip Setup
self.pd1.state = 'N0'
self.pd1.save()
resp = self.client.post(setup_url, format='json', data={})
self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
# Delete 0e00 and 0e01 to emulate saver
StreamData.objects.get(stream_slug=self.pd1.get_stream_slug_for('0e00')).delete()
StreamData.objects.get(stream_slug=self.pd1.get_stream_slug_for('0e01')).delete()
start_temp = StreamData.objects.filter(stream_slug=self.pd1.get_stream_slug_for('5023')).first()
end_temp = StreamData.objects.filter(stream_slug=self.pd1.get_stream_slug_for('5023')).last()
start_event = StreamEventData.objects.filter(stream_slug=self.pd1.get_stream_slug_for('5020')).first()
end_event = StreamEventData.objects.filter(stream_slug=self.pd1.get_stream_slug_for('5020')).last()
self.assertTrue(start_event.timestamp == start_temp.timestamp)
self.assertTrue(end_event.timestamp > end_temp.timestamp)
url = reverse('shipping-trip-detail', kwargs={'slug': self.pd1.slug})
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
deserialized = json.loads(resp.content.decode())
self.assertEqual(deserialized['slug'], self.pd1.slug)
self.assertEqual(deserialized['state'], 'N1')
self.assertIsNone(deserialized['data_mask'])
self.assertEqual(deserialized['trip_date_range']['original_start'], formatted_ts(start_temp.timestamp))
self.assertEqual(deserialized['trip_date_range']['original_end'], formatted_ts(end_event.timestamp))
self.assertEqual(deserialized['trip_date_range']['actual_start'], formatted_ts(start_temp.timestamp))
actual_ts_end = end_event.timestamp + datetime.timedelta(seconds=1)
self.assertEqual(deserialized['trip_date_range']['actual_end'], formatted_ts(actual_ts_end))
self.assertFalse(deserialized['trip_date_range']['masked'])
self.assertIsNone(deserialized['trip_date_range']['start_data'])
self.assertIsNone(deserialized['trip_date_range']['end_data'])
self.assertEqual(len(deserialized['streams']), 5)
# Trip Archive
resp = self.client.post(archive_url, format='json', data=archive_payload)
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
deserialized = json.loads(resp.content.decode())
block_slug = deserialized['slug']
block = DataBlock.objects.get(slug=block_slug)
url = reverse('shipping-trip-detail', kwargs={'slug': block_slug})
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
deserialized = json.loads(resp.content.decode())
self.assertEqual(deserialized['slug'], block.slug)
self.assertEqual(deserialized['state'], 'A')
self.assertIsNone(deserialized['data_mask'])
self.assertEqual(deserialized['trip_date_range']['original_start'], formatted_ts(start_temp.timestamp))
self.assertEqual(deserialized['trip_date_range']['original_end'], formatted_ts(end_event.timestamp))
self.assertEqual(deserialized['trip_date_range']['actual_start'], formatted_ts(start_temp.timestamp))
self.assertEqual(deserialized['trip_date_range']['actual_end'], formatted_ts(actual_ts_end))
self.assertFalse(deserialized['trip_date_range']['masked'])
self.assertIsNone(deserialized['trip_date_range']['start_data'])
self.assertIsNone(deserialized['trip_date_range']['end_data'])
self.assertEqual(len(deserialized['streams']), 5)
# Invalid (older) mask on start date and valid mask on end date
mask_start = start_event.timestamp - datetime.timedelta(seconds=60)
mask_end = end_event.timestamp - datetime.timedelta(seconds=60)
set_data_mask(block, formatted_ts(mask_start), formatted_ts(mask_end), [], [], self.u1)
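# The mask start predates the trip start, so it should be ignored;
# only the mask end is expected to clip the actual date range.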
resp = self.client.get(url, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
deserialized = json.loads(resp.content.decode())
self.assertEqual(deserialized['slug'], block.slug)
self.assertEqual(deserialized['state'], 'A')
self.assertIsNotNone(deserialized['data_mask'])
self.assertEqual(deserialized['data_mask']['start'], formatted_ts(mask_start))
self.assertEqual(deserialized['data_mask']['end'], formatted_ts(mask_end))
self.assertEqual(deserialized['trip_date_range']['original_start'], formatted_ts(start_temp.timestamp))
self.assertEqual(deserialized['trip_date_range']['original_end'], formatted_ts(end_event.timestamp))
self.assertEqual(deserialized['trip_date_range']['actual_start'], formatted_ts(start_temp.timestamp))
self.assertEqual(deserialized['trip_date_range']['actual_end'], formatted_ts(mask_end))
self.assertTrue(deserialized['trip_date_range']['masked'])
self.assertIsNone(deserialized['trip_date_range']['start_data'])
self.assertIsNone(deserialized['trip_date_range']['end_data'])
self.client.logout()
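# Sketch (an assumption, not the API under test): how a data mask is expected
# to clamp a trip's actual date range, mirroring the assertions above.
# A mask boundary only takes effect when it falls inside the original range.
def clamp_trip_range(original_start, original_end, mask_start=None, mask_end=None):
    # an older-than-start mask is a no-op; a mask end before the trip end clips it
    actual_start = max(original_start, mask_start) if mask_start else original_start
    actual_end = min(original_end, mask_end) if mask_end else original_end
    return actual_start, actual_end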
| 47.303342 | 111 | 0.689419 | 2,284 | 18,401 | 5.361208 | 0.078371 | 0.107799 | 0.130094 | 0.091874 | 0.883136 | 0.875704 | 0.854961 | 0.843936 | 0.82548 | 0.82548 | 0 | 0.015397 | 0.174067 | 18,401 | 388 | 112 | 47.425258 | 0.790301 | 0.033911 | 0 | 0.778182 | 0 | 0 | 0.129476 | 0.0085 | 0 | 0 | 0 | 0 | 0.469091 | 1 | 0.029091 | false | 0.032727 | 0.058182 | 0 | 0.090909 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
ce9d1ae3e11c247151d2bde84a712cf77b1ffeaf | 398 | py | Python | bitmovin_api_sdk/encoding/infrastructure/prewarmed_encoder_pools/__init__.py | jaythecaesarean/bitmovin-api-sdk-python | 48166511fcb9082041c552ace55a9b66cc59b794 | [
"MIT"
] | 11 | 2019-07-03T10:41:16.000Z | 2022-02-25T21:48:06.000Z | bitmovin_api_sdk/encoding/infrastructure/prewarmed_encoder_pools/__init__.py | jaythecaesarean/bitmovin-api-sdk-python | 48166511fcb9082041c552ace55a9b66cc59b794 | [
"MIT"
] | 8 | 2019-11-23T00:01:25.000Z | 2021-04-29T12:30:31.000Z | bitmovin_api_sdk/encoding/infrastructure/prewarmed_encoder_pools/__init__.py | jaythecaesarean/bitmovin-api-sdk-python | 48166511fcb9082041c552ace55a9b66cc59b794 | [
"MIT"
] | 13 | 2020-01-02T14:58:18.000Z | 2022-03-26T12:10:30.000Z | from bitmovin_api_sdk.encoding.infrastructure.prewarmed_encoder_pools.prewarmed_encoder_pools_api import PrewarmedEncoderPoolsApi
from bitmovin_api_sdk.encoding.infrastructure.prewarmed_encoder_pools.schedules.schedules_api import SchedulesApi
from bitmovin_api_sdk.encoding.infrastructure.prewarmed_encoder_pools.prewarmed_encoder_pool_list_query_params import PrewarmedEncoderPoolListQueryParams
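# Usage sketch (an assumption based on the SDK's documented entry point,
# not part of this module): the pools API is reached via the resource tree:
#     from bitmovin_api_sdk import BitmovinApi
#     bitmovin_api = BitmovinApi(api_key='<API_KEY>')
#     pools_api = bitmovin_api.encoding.infrastructure.prewarmed_encoder_pools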
| 99.5 | 153 | 0.937186 | 46 | 398 | 7.652174 | 0.391304 | 0.227273 | 0.238636 | 0.153409 | 0.610795 | 0.610795 | 0.610795 | 0.610795 | 0.610795 | 0.4375 | 0 | 0 | 0.030151 | 398 | 3 | 154 | 132.666667 | 0.911917 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
cebc6e653256cd20436e65c009bd4dcf84a6786b | 68,609 | py | Python | benchmarks/SimResults/Paper2_pinned_spec_base/cmp_namdcactusADMgromacsleslie3d/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | benchmarks/SimResults/Paper2_pinned_spec_base/cmp_namdcactusADMgromacsleslie3d/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | benchmarks/SimResults/Paper2_pinned_spec_base/cmp_namdcactusADMgromacsleslie3d/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
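# Hedged note: this reads like McPAT-style output -- 'Area' presumably in
# mm^2, with 'Peak Dynamic', 'Runtime Dynamic' and the leakage figures in watts.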
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.224326,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.378884,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.42087,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.501523,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.868456,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.498084,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.86806,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.277895,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 7.84562,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.268433,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0181806,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.206937,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.134457,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.47537,
'Execution Unit/Register Files/Runtime Dynamic': 0.152637,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.561501,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.45942,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 4.26438,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 4.92265e-05,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 4.92265e-05,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 4.25622e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 1.63047e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00193148,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0020725,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000483201,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.129257,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.318117,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.439014,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 0.888942,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0918721,
'L2/Runtime Dynamic': 0.0255076,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 6.69347,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.65146,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.176526,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.176526,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 7.53046,
'Load Store Unit/Runtime Dynamic': 3.69855,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.435283,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.870566,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.154483,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.155826,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0522591,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.825566,
'Memory Management Unit/Runtime Dynamic': 0.208086,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 29.824,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.936502,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0369143,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.243704,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.21712,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 10.3026,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
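# Second core entry (presumably cactusADM, per the cmp_namdcactusADMgromacsleslie3d run name).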
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.199229,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.359172,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.09797,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.351326,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.566676,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.286039,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.20404,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.23348,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.20187,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.207431,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0147362,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.180252,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.108983,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.387682,
'Execution Unit/Register Files/Runtime Dynamic': 0.123719,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.429558,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.927919,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 3.02023,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000747924,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000747924,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000652464,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00025314,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00156555,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00371386,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00713444,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.104768,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.217993,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.355841,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96396,
'Instruction Fetch Unit/Runtime Dynamic': 0.689451,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0489845,
'L2/Runtime Dynamic': 0.0104078,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.79435,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.23214,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0827326,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0827326,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.18504,
'Load Store Unit/Runtime Dynamic': 1.72288,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.204005,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.408009,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0724019,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0731368,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0357388,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.680477,
'Memory Management Unit/Runtime Dynamic': 0.108876,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 23.6698,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.545656,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0224914,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.16751,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.735657,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 6.2875,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
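# Third core entry (presumably gromacs in this run).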
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.130511,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.305198,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.748532,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.276215,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.445524,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.224886,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.946625,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.201148,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.5104,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.141414,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0115857,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.130862,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0856833,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.272275,
'Execution Unit/Register Files/Runtime Dynamic': 0.0972689,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.308324,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.695154,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.44962,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000593086,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000593086,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000521399,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000204479,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00123085,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00293842,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0055142,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0823695,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.23941,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.172787,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.279764,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.7122,
'Instruction Fetch Unit/Runtime Dynamic': 0.543373,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0333146,
'L2/Runtime Dynamic': 0.0070345,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.50955,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.0946,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0735185,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0735186,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.85672,
'Load Store Unit/Runtime Dynamic': 1.53068,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.181284,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.362569,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0643384,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0648357,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.325767,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0283346,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.592398,
'Memory Management Unit/Runtime Dynamic': 0.0931704,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 21.2945,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.371995,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0169891,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.133831,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.522815,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 5.1467,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
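# Fourth core entry (presumably leslie3d in this run).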
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0378934,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.232452,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.195445,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0895871,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.144501,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0729391,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.307027,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0724969,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.31424,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0369238,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00375769,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0417331,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0277904,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0786569,
'Execution Unit/Register Files/Runtime Dynamic': 0.0315481,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0973954,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.254961,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.23137,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000239945,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000239945,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000209379,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 8.12663e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000399211,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00108848,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00228671,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0267156,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.69934,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.073893,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0907383,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.00033,
'Instruction Fetch Unit/Runtime Dynamic': 0.194722,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0545929,
'L2/Runtime Dynamic': 0.0141123,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.28958,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.530256,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0340497,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0340498,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.45037,
'Load Store Unit/Runtime Dynamic': 0.732228,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0839607,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.167922,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0297979,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.030607,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.105659,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0121463,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.312955,
'Memory Management Unit/Runtime Dynamic': 0.0427533,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 14.722,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0971291,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00522397,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0441463,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.146499,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.36168,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 4.976365058509881,
'Runtime Dynamic': 4.976365058509881,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.235345,
'Runtime Dynamic': 0.0960744,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 89.7456,
'Peak Power': 122.858,
'Runtime Dynamic': 24.1945,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 89.5102,
'Total Cores/Runtime Dynamic': 24.0985,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.235345,
'Total L3s/Runtime Dynamic': 0.0960744,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}} | 75.064551 | 124 | 0.68201 | 8,087 | 68,609 | 5.780141 | 0.067392 | 0.123567 | 0.112956 | 0.093445 | 0.93903 | 0.931157 | 0.919327 | 0.887408 | 0.867149 | 0.846804 | 0 | 0.13176 | 0.224329 | 68,609 | 914 | 125 | 75.064551 | 0.74659 | 0 | 0 | 0.646608 | 0 | 0 | 0.657411 | 0.048098 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ced444e9e8919b7e2ddcd38b0aec118d24d7f465 | 155 | py | Python | resume/views.py | HuMingqi/PublicNumber_In_Wechat | e2d05975b812dd579f2ce04853de8ee6d55985f1 | [
"MIT"
] | null | null | null | resume/views.py | HuMingqi/PublicNumber_In_Wechat | e2d05975b812dd579f2ce04853de8ee6d55985f1 | [
"MIT"
] | null | null | null | resume/views.py | HuMingqi/PublicNumber_In_Wechat | e2d05975b812dd579f2ce04853de8ee6d55985f1 | [
"MIT"
] | null | null | null | from django.shortcuts import render_to_response
# Create your views here.
def get_resume_index(request):
return render_to_response('index.html', {})
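# render_to_response() was deprecated in Django 2.0 and removed in 3.0; on a
# modern Django the equivalent view (a sketch assuming the same 'index.html'
# template) would be:
#
#   from django.shortcuts import render
#
#   def get_resume_index(request):
#       return render(request, 'index.html', {})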
| 22.142857 | 47 | 0.780645 | 22 | 155 | 5.227273 | 0.818182 | 0.13913 | 0.278261 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129032 | 155 | 6 | 48 | 25.833333 | 0.851852 | 0.148387 | 0 | 0 | 0 | 0 | 0.076923 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
ced678ae2530cee9fcf4b638d03df64c38af9ac1 | 7,309 | py | Python | designs/pihdf/de0_nano_1/test/utest_de0_nano_1.py | hnikolov/de0_nano | 62de93ebc7154642b23408ced515db0ad63d2aec | [
"MIT"
] | null | null | null | designs/pihdf/de0_nano_1/test/utest_de0_nano_1.py | hnikolov/de0_nano | 62de93ebc7154642b23408ced515db0ad63d2aec | [
"MIT"
] | null | null | null | designs/pihdf/de0_nano_1/test/utest_de0_nano_1.py | hnikolov/de0_nano | 62de93ebc7154642b23408ced515db0ad63d2aec | [
"MIT"
] | null | null | null | import unittest
from myhdl_lib import *
from t_de0_nano_1 import t_de0_nano_1
class Test_de0_nano_1(t_de0_nano_1):
'''|
| The main class for unit-testing. Add your tests here.
|________'''
def __init__(self):
# call base class constructor
t_de0_nano_1.__init__(self)
# Automatically executed BEFORE every TestCase
def setUp(self):
t_de0_nano_1.setUp(self)
# Automatically executed AFTER every TestCase
def tearDown(self):
t_de0_nano_1.tearDown(self)
# Initialise data, duplicate output
def init_data_dupl(self):
self.stim_mode_1.append({ "data": 0 })
self.stim_mode_2.append({ "data": 0 })
self.ref_LEDs.append( { "data": 5 })
self.stim_mode_1.append({ "data": 0 })
self.stim_mode_2.append({ "data": 0 })
self.ref_LEDs.append( { "data": 10 })
self.stim_mode_1.append({ "data": 0 })
self.stim_mode_2.append({ "data": 0 })
self.ref_LEDs.append( { "data": 15 })
self.stim_mode_1.append({ "data": 0 })
self.stim_mode_2.append({ "data": 0 })
self.ref_LEDs.append( { "data": 0 })
self.stim_mode_1.append({ "data": 0 })
self.stim_mode_2.append({ "data": 0 })
self.ref_LEDs.append( { "data": 5 })
self.stim_mode_1.append({ "data": 0 })
self.stim_mode_2.append({ "data": 0 })
self.ref_LEDs.append( { "data": 10 })
self.stim_mode_1.append({ "data": 0 })
self.stim_mode_2.append({ "data": 0 })
self.ref_LEDs.append( { "data": 15 })
self.stim_mode_1.append({ "data": 0 })
self.stim_mode_2.append({ "data": 0 })
self.ref_LEDs.append( { "data": 0 })
# ----------------------------------------------------------------------------
# @unittest.skip("")
def test_000(self):
""" >>>>>> TEST_000: TODO """
self.models = {"top":self.BEH}
# Set fdump to True in order to generate test vector files for the global interfaces
self.tb_config = {"simulation_time":1, "cosimulation":False, "trace":False, "fdump":False, "ipgi":0, "ipgo":0}
self.run_it()
# ----------------------------------------------------------------------------
# @unittest.skip("")
def test_001(self):
""" >>>>>> TEST_001: Counting, duplicated output """
self.models = {"top":self.RTL}
# Set fdump to True in order to generate test vector files for the global interfaces
self.tb_config = {"simulation_time":"auto", "cosimulation":True, "trace":False, "fdump":False, "ipgi":0, "ipgo":0}
self.init_data_dupl()
self.run_it()
# ----------------------------------------------------------------------------
# @unittest.skip("")
def test_002(self):
""" >>>>>> TEST_002: Counting, duplicated output """
self.models = {"top":self.RTL}
# Set fdump to True in order to generate test vector files for the global interfaces
self.tb_config = {"simulation_time":"auto", "cosimulation":True, "trace":False, "fdump":False, "ipgi":2, "ipgo":0}
self.init_data_dupl()
self.run_it()
# ----------------------------------------------------------------------------
# @unittest.skip("")
def test_003(self):
""" >>>>>> TEST_003: Counting, duplicated output """
self.models = {"top":self.RTL}
# Set fdump to True in order to generate test vector files for the global interfaces
self.tb_config = {"simulation_time":"auto", "cosimulation":True, "trace":True, "fdump":False, "ipgi":0, "ipgo":4}
self.init_data_dupl()
self.run_it()
# ----------------------------------------------------------------------------
# @unittest.skip("")
def test_004(self):
""" >>>>>> TEST_004: Counting, duplicated output """
self.models = {"top":self.RTL}
# Set fdump to True in order to generate test vector files for the global interfaces
self.tb_config = {"simulation_time":"auto", "cosimulation":True, "trace":False, "fdump":False, "ipgi":1, "ipgo":3}
self.init_data_dupl()
self.run_it()
# ----------------------------------------------------------------------------
# @unittest.skip("")
def test_005(self):
""" >>>>>> TEST_005: Running light, left """
self.models = {"top":self.RTL}
# Set fdump to True in order to generate test vector files for the global interfaces
self.tb_config = {"simulation_time":"auto", "cosimulation":True, "trace":False, "fdump":False, "ipgi":0, "ipgo":0}
self.stim_mode_1.append({ "data": 0 })
self.stim_mode_2.append({ "data": 1 }) # Registered in TOut
self.ref_LEDs.append( { "data": 2 })
self.stim_mode_1.append({ "data": 0 })
self.ref_LEDs.append( { "data": 4 })
self.stim_mode_1.append({ "data": 0 })
self.ref_LEDs.append( { "data": 8 })
self.stim_mode_1.append({ "data": 0 })
self.ref_LEDs.append( { "data": 1 })
self.stim_mode_1.append({ "data": 0 })
self.ref_LEDs.append( { "data": 2 })
self.stim_mode_1.append({ "data": 0 })
self.ref_LEDs.append( { "data": 4 })
self.run_it()
# ----------------------------------------------------------------------------
# @unittest.skip("")
def test_006(self):
""" >>>>>> TEST_006: Running light, right """
self.models = {"top":self.RTL}
# Set fdump to True in order to generate test vector files for the global interfaces
self.tb_config = {"simulation_time":"auto", "cosimulation":True, "trace":False, "fdump":False, "ipgi":0, "ipgo":0}
self.stim_mode_1.append({ "data": 0 })
self.stim_mode_2.append({ "data": 2 }) # Registered in TOut
self.ref_LEDs.append( { "data": 4 })
self.stim_mode_1.append({ "data": 0 })
self.ref_LEDs.append( { "data": 2 })
self.stim_mode_1.append({ "data": 0 })
self.ref_LEDs.append( { "data": 1 })
self.stim_mode_1.append({ "data": 0 })
self.ref_LEDs.append( { "data": 8 })
self.stim_mode_1.append({ "data": 0 })
self.ref_LEDs.append( { "data": 4 })
self.stim_mode_1.append({ "data": 0 })
self.ref_LEDs.append( { "data": 2 })
self.run_it()
# ----------------------------------------------------------------------------
# @unittest.skip("")
def test_007(self):
""" >>>>>> TEST_007: static pattern. Note: Change the output from the incrementor for synthesis! """
self.models = {"top":self.RTL}
# Set fdump to True in order to generate test vector files for the global interfaces
self.tb_config = {"simulation_time":"auto", "cosimulation":True, "trace":False, "fdump":False, "ipgi":0, "ipgo":2}
self.stim_mode_1.append({ "data": 0 })
self.stim_mode_2.append({ "data": 3 })
self.ref_LEDs.append( { "data": 21 })
self.stim_mode_1.append({ "data": 1 })
self.stim_mode_2.append({ "data": 3 })
self.ref_LEDs.append( { "data": 21 })
self.run_it()
# ----------------------------------------------------------------------------
| 37.101523 | 122 | 0.518949 | 883 | 7,309 | 4.093998 | 0.11778 | 0.15491 | 0.112863 | 0.124481 | 0.82462 | 0.812172 | 0.806639 | 0.786722 | 0.77787 | 0.768188 | 0 | 0.031447 | 0.23861 | 7,309 | 196 | 123 | 37.290816 | 0.618149 | 0.293337 | 0 | 0.728155 | 0 | 0 | 0.12537 | 0 | 0 | 0 | 0 | 0.005102 | 0 | 1 | 0.116505 | false | 0 | 0.029126 | 0 | 0.15534 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0c8f2eab77d0c010aa2a195edc8fbcaebfd8d875 | 45 | py | Python | src/gluonnlp/torch/models/__init__.py | leezu/gluon-nlp | 19de74c2b03f22dde8311a0225b4571c2deef0e4 | [
"Apache-2.0"
] | 2,461 | 2018-04-25T03:47:22.000Z | 2022-03-31T03:58:48.000Z | src/gluonnlp/torch/models/__init__.py | leezu/gluon-nlp | 19de74c2b03f22dde8311a0225b4571c2deef0e4 | [
"Apache-2.0"
] | 1,450 | 2018-04-25T16:14:25.000Z | 2022-02-24T21:02:57.000Z | src/gluonnlp/torch/models/__init__.py | leezu/gluon-nlp | 19de74c2b03f22dde8311a0225b4571c2deef0e4 | [
"Apache-2.0"
] | 578 | 2018-04-25T04:55:18.000Z | 2022-03-16T03:01:45.000Z | from . import transformer
from . import bert
| 15 | 25 | 0.777778 | 6 | 45 | 5.833333 | 0.666667 | 0.571429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.177778 | 45 | 2 | 26 | 22.5 | 0.945946 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
0ca548f50cb1a362485399dd6ef17ee8716ff105 | 3,266 | py | Python | puskesmas_app/migrations/0004_auto_20180622_1342.py | kurniantoska/medicalwebapp_project | a2e36a44b598ad2989c207f950a89c02d987e00d | [
"BSD-3-Clause"
] | 1 | 2019-10-22T02:12:49.000Z | 2019-10-22T02:12:49.000Z | puskesmas_app/migrations/0004_auto_20180622_1342.py | kurniantoska/medicalwebapp_project | a2e36a44b598ad2989c207f950a89c02d987e00d | [
"BSD-3-Clause"
] | 3 | 2020-06-05T18:30:35.000Z | 2021-06-10T20:31:09.000Z | puskesmas_app/migrations/0004_auto_20180622_1342.py | kurniantoska/medicalwebapp_project | a2e36a44b598ad2989c207f950a89c02d987e00d | [
"BSD-3-Clause"
] | null | null | null | # Generated by Django 2.0.6 on 2018-06-22 05:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('puskesmas_app', '0003_auto_20180622_1325'),
]
operations = [
migrations.AlterField(
model_name='pemeriksaan',
name='asma_diri',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='asma_keluarga',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='diabetes_diri',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='diabetes_keluarga',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='hipertensi_diri',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='hipertensi_keluarga',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='kanker_diri',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='kanker_keluarga',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='kolestrol_tinggi_diri',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='kolestrol_tinggi_keluarga',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='konsumsi_alkohol',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='kurang_aktifitas_fisik',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='kurang_sayur_dan_buah',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='merokok',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='penyakit_jantung_diri',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='penyakit_jantung_keluarga',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='stroke_diri',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='pemeriksaan',
name='stroke_keluarga',
field=models.NullBooleanField(),
),
]
| 31.403846 | 54 | 0.53613 | 234 | 3,266 | 7.286325 | 0.226496 | 0.211144 | 0.26393 | 0.306158 | 0.860411 | 0.860411 | 0.860411 | 0.829912 | 0.829912 | 0.78827 | 0 | 0.014882 | 0.362217 | 3,266 | 103 | 55 | 31.708738 | 0.803649 | 0.013778 | 0 | 0.742268 | 1 | 0 | 0.17009 | 0.050706 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.010309 | 0 | 0.041237 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
0cb815aeccdb12d6544f00dc3fc0cf174c6cf140 | 15,310 | py | Python | tests/test_extract_processor.py | SMK1085/drops-hull-extract-s3 | 443e7445dd9aeffc17075af222332a9addbbe657 | [
"MIT"
] | null | null | null | tests/test_extract_processor.py | SMK1085/drops-hull-extract-s3 | 443e7445dd9aeffc17075af222332a9addbbe657 | [
"MIT"
] | null | null | null | tests/test_extract_processor.py | SMK1085/drops-hull-extract-s3 | 443e7445dd9aeffc17075af222332a9addbbe657 | [
"MIT"
] | null | null | null | import os
from src.extract_processor import extract_processor
from tests import *
import unittest
from unittest.mock import Mock, patch
import requests
import responses
import pytest
import boto3
from moto import mock_s3
from datetime import datetime, timezone
import tempfile
import csv
messageMissingConfig = "Lambda function not properly configured. Missing environment variable '{0:s}'."
@mock_s3
class ExtractProcessorTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def reset_env(self):
os.environ.clear()
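        # Clearing os.environ between tests keeps the handler's env-var driven
        # configuration (S3_BUCKET, S3_SINGLEFILE, S3_DAILYFOLDER, ...) isolated
        # per test case.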
def test_configerror_bucketname(self):
os.environ["S3_BUCKET"] = ""
# Compose the event body
body = None
file_path = os.path.join(os.path.dirname(__file__),"data/account_extract.json")
with open(file_path, "r") as outfile:
reqObj = json.load(outfile)
body = reqObj
# Invoke the handler
resp = extract_processor.handler(api_gateway_event(body), None)
# Verify assertions
assert resp == { 'statusCode': 500, 'body': json.dumps(messageMissingConfig.format("S3_BUCKET")) }
def test_configerror_s3fileformat(self):
os.environ["S3_BUCKET"] = "hull-se-test"
os.environ["S3_FILE_FORMAT"] = "foo"
# Compose the event body
body = None
file_path = os.path.join(os.path.dirname(__file__),"data/account_extract.json")
with open(file_path, "r") as outfile:
reqObj = json.load(outfile)
body = reqObj
# Invoke the handler
resp = extract_processor.handler(api_gateway_event(body), None)
# Verify assertions
error_msg = "Lambda function not properly configured. File format '{0:s}' is not one of the allowed values of 'csv' or 'json'."
assert resp == { 'statusCode': 500, 'body': json.dumps(error_msg.format("foo")) }
def test_invalidrequestbody(self):
os.environ["S3_BUCKET"] = "hull-se-test"
# Compose the event body
body = { "foo": "bar" }
# Invoke the handler
resp = extract_processor.handler(api_gateway_event(body), None)
# Verify assertions
assert resp == { 'statusCode': 200, 'body': json.dumps("Nothing to process.") }
@responses.activate
def test_extract_is_successful(self):
# Configure the mock to fake the get request
download_content_path = os.path.join(os.path.dirname(__file__),"data/account_extract_download.json")
with open(download_content_path, 'r') as download_file:
res_body = download_file.read()
responses.add(responses.GET, "https://somewhereinthecloud.local.host/extracts/account_report/test.json", body=res_body)
# Now let's fake S3 using moto
conn = boto3.resource('s3', region_name='us-east-1')
s3clnt = boto3.client('s3', region_name='us-east-1')
# pylint: disable=no-member
conn.create_bucket(Bucket="hull-se-test")
# Minimum env config
os.environ["S3_BUCKET"] = "hull-se-test"
# Compose the event body
body = None
file_path = os.path.join(os.path.dirname(__file__),"data/account_extract.json")
with open(file_path, "r") as outfile:
reqObj = json.load(outfile)
body = reqObj
# Invoke the handler
resp = extract_processor.handler(api_gateway_event(body), None)
# Verify assertions
assert None != s3clnt.get_object(Bucket="hull-se-test", Key="account_extract_5e032e118ee92e9586007425.csv")
assert resp == {
'statusCode': 200,
'body': json.dumps("Successfully processed extract.")
}
@responses.activate
def test_singlefile_is_successful(self):
# Configure the mock to fake the get request
download_content_path = os.path.join(os.path.dirname(__file__),"data/account_extract_download.json")
with open(download_content_path, 'r') as download_file:
res_body = download_file.read()
responses.add(responses.GET, "https://somewhereinthecloud.local.host/extracts/account_report/test.json", body=res_body)
# Now let's fake S3 using moto
conn = boto3.resource('s3', region_name='us-east-1')
s3clnt = boto3.client('s3', region_name='us-east-1')
# pylint: disable=no-member
conn.create_bucket(Bucket="hull-se-test")
# Extended env config
os.environ["S3_BUCKET"] = "hull-se-test"
os.environ["S3_SINGLEFILE"] = "my-single-file"
# Compose the event body
body = None
file_path = os.path.join(os.path.dirname(__file__),"data/account_extract.json")
with open(file_path, "r") as outfile:
reqObj = json.load(outfile)
body = reqObj
# Invoke the handler
resp = extract_processor.handler(api_gateway_event(body), None)
# Verify assertions
assert None != s3clnt.get_object(Bucket="hull-se-test", Key="my-single-file.csv")
assert resp == {
'statusCode': 200,
'body': json.dumps("Successfully processed extract.")
}
@responses.activate
def test_dayfolder_is_successful(self):
# Configure the mock to fake the get request
download_content_path = os.path.join(os.path.dirname(__file__),"data/account_extract_download.json")
with open(download_content_path, 'r') as download_file:
res_body = download_file.read()
responses.add(responses.GET, "https://somewhereinthecloud.local.host/extracts/account_report/test.json", body=res_body)
# Now let's fake S3 using moto
conn = boto3.resource('s3', region_name='us-east-1')
s3clnt = boto3.client('s3', region_name='us-east-1')
# pylint: disable=no-member
conn.create_bucket(Bucket="hull-se-test")
# Extended env config
os.environ["S3_BUCKET"] = "hull-se-test"
os.environ["S3_DAILYFOLDER"] = "True"
# Compose the event body
body = None
file_path = os.path.join(os.path.dirname(__file__),"data/account_extract.json")
with open(file_path, "r") as outfile:
reqObj = json.load(outfile)
body = reqObj
# Invoke the handler
resp = extract_processor.handler(api_gateway_event(body), None)
# Verify assertions
now = datetime.now(tz=timezone.utc)
assert None != s3clnt.get_object(Bucket="hull-se-test", Key=f"{now:%Y-%m-%d}/account_extract_5e032e118ee92e9586007425.csv")
assert resp == {
'statusCode': 200,
'body': json.dumps("Successfully processed extract.")
}
@responses.activate
def test_dailysinglefile_is_successful(self):
# Configure the mock to fake the get request
download_content_path = os.path.join(os.path.dirname(__file__),"data/account_extract_download.json")
with open(download_content_path, 'r') as download_file:
res_body = download_file.read()
responses.add(responses.GET, "https://somewhereinthecloud.local.host/extracts/account_report/test.json", body=res_body)
# Now let's fake S3 using moto
conn = boto3.resource('s3', region_name='us-east-1')
s3clnt = boto3.client('s3', region_name='us-east-1')
# pylint: disable=no-member
conn.create_bucket(Bucket="hull-se-test")
# Extended env config
os.environ["S3_BUCKET"] = "hull-se-test"
os.environ["S3_DAILYFOLDER"] = "True"
os.environ["S3_SINGLEFILE"] = "my-daily-file"
# Compose the event body
body = None
file_path = os.path.join(os.path.dirname(__file__),"data/account_extract.json")
with open(file_path, "r") as outfile:
reqObj = json.load(outfile)
body = reqObj
# Invoke the handler
resp = extract_processor.handler(api_gateway_event(body), None)
# Verify assertions
now = datetime.now(tz=timezone.utc)
assert None != s3clnt.get_object(Bucket="hull-se-test", Key=f"{now:%Y-%m-%d}/my-daily-file.csv")
assert resp == {
'statusCode': 200,
'body': json.dumps("Successfully processed extract.")
}
@responses.activate
def test_singlefilewithfalsedaily_is_successful(self):
# Configure the mock to fake the get request
download_content_path = os.path.join(os.path.dirname(__file__),"data/account_extract_download.json")
with open(download_content_path, 'r') as download_file:
res_body = download_file.read()
responses.add(responses.GET, "https://somewhereinthecloud.local.host/extracts/account_report/test.json", body=res_body)
# Now let's fake S3 using moto
conn = boto3.resource('s3', region_name='us-east-1')
s3clnt = boto3.client('s3', region_name='us-east-1')
# pylint: disable=no-member
conn.create_bucket(Bucket="hull-se-test")
# Extended env config
os.environ["S3_BUCKET"] = "hull-se-test"
os.environ["S3_DAILYFOLDER"] = "False"
os.environ["S3_SINGLEFILE"] = "my-single-file2"
# Compose the event body
body = None
file_path = os.path.join(os.path.dirname(__file__),"data/account_extract.json")
with open(file_path, "r") as outfile:
reqObj = json.load(outfile)
body = reqObj
# Invoke the handler
resp = extract_processor.handler(api_gateway_event(body), None)
# Verify assertions
assert None != s3clnt.get_object(Bucket="hull-se-test", Key="my-single-file2.csv")
assert resp == {
'statusCode': 200,
'body': json.dumps("Successfully processed extract.")
}
@responses.activate
def test_bucketdoesntexist_fails(self):
# Configure the mock to fake the get request
download_content_path = os.path.join(os.path.dirname(__file__),"data/account_extract_download.json")
with open(download_content_path, 'r') as download_file:
res_body = download_file.read()
responses.add(responses.GET, "https://somewhereinthecloud.local.host/extracts/account_report/test.json", body=res_body)
# Now let's fake S3 using moto
conn = boto3.resource('s3', region_name='us-east-1')
# pylint: disable=no-member
conn.create_bucket(Bucket="hull-se-test")
# Extended env config
os.environ["S3_BUCKET"] = "foo"
os.environ["S3_SINGLEFILE"] = "my-single-file"
# Compose the event body
body = None
file_path = os.path.join(os.path.dirname(__file__),"data/account_extract.json")
with open(file_path, "r") as outfile:
reqObj = json.load(outfile)
body = reqObj
# Invoke the handler
resp = extract_processor.handler(api_gateway_event(body), None)
# Verify assertions
assert resp['statusCode'] == 500
@responses.activate
def test_userextract_is_successful(self):
# Configure the mock to fake the get request
download_content_path = os.path.join(os.path.dirname(__file__),"data/user_extract_download.json")
with open(download_content_path, 'r') as download_file:
res_body = download_file.read()
responses.reset()
responses.add(responses.GET, "https://somewhereinthecloud.hull.net/extracts/user_report/test.json", body=res_body)
# Now let's fake S3 using moto
conn = boto3.resource('s3', region_name='us-east-1')
s3clnt = boto3.client('s3', region_name='us-east-1')
# pylint: disable=no-member
conn.create_bucket(Bucket="hull-se-test")
# Minimum env config
os.environ["S3_BUCKET"] = "hull-se-test"
os.environ["S3_SINGLEFILE"] = "my-userextract"
os.environ['HULL_EXPORT_FIELDS'] = json.dumps(["id", "email", "name", "external_id", "segment_ids"])
# Compose the event body
body = None
file_path = os.path.join(os.path.dirname(__file__),"data/user_extract.json")
with open(file_path, "r") as outfile:
reqObj = json.load(outfile)
body = reqObj
# Invoke the handler
resp = extract_processor.handler(api_gateway_event(body), None)
# Verify assertions
assert None != s3clnt.get_object(Bucket="hull-se-test", Key="my-userextract.csv")
with tempfile.TemporaryDirectory() as tmpdir:
tmp_path = os.path.join(tmpdir, "my-userextract.csv")
s3clnt.download_file("hull-se-test", "my-userextract.csv", tmp_path)
print(tmp_path)
with open(tmp_path, 'r') as csv_file:
reader = csv.reader(csv_file)
for i, line in enumerate(reader):
if i == 0:
assert line == ["id", "email", "name", "external_id", "segment_ids", "segment_names"]
os.remove(tmp_path)
assert resp == {
'statusCode': 200,
'body': json.dumps("Successfully processed extract.")
}
@responses.activate
def test_jsonextract_is_successful(self):
# Configure the mock to fake the get request
download_content_path = os.path.join(os.path.dirname(__file__),"data/user_extract_download.json")
with open(download_content_path, 'r') as download_file:
res_body = download_file.read()
responses.reset()
responses.add(responses.GET, "https://somewhereinthecloud.hull.net/extracts/user_report/test.json", body=res_body)
# Now let's fake S3 using moto
conn = boto3.resource('s3', region_name='us-east-1')
s3clnt = boto3.client('s3', region_name='us-east-1')
# pylint: disable=no-member
conn.create_bucket(Bucket="hull-se-test")
# Minimum env config
os.environ["S3_BUCKET"] = "hull-se-test"
os.environ["S3_SINGLEFILE"] = "my-jsonextract"
os.environ["S3_FILE_FORMAT"] = "json"
os.environ['HULL_EXPORT_FIELDS'] = json.dumps(["id", "email", "name", "external_id", "segment_ids"])
# Compose the event body
body = None
file_path = os.path.join(os.path.dirname(__file__),"data/user_extract.json")
with open(file_path, "r") as outfile:
reqObj = json.load(outfile)
body = reqObj
# Invoke the handler
resp = extract_processor.handler(api_gateway_event(body), None)
# Verify assertions
assert None != s3clnt.get_object(Bucket="hull-se-test", Key="my-jsonextract.json")
with tempfile.TemporaryDirectory() as tmpdir:
tmp_path = os.path.join(tmpdir, "my-userextract.csv")
s3clnt.download_file("hull-se-test", "my-jsonextract.json", tmp_path)
with open(tmp_path, 'r') as json_file:
data = json.load(json_file)
assert 3 == len(data)
os.remove(tmp_path)
assert resp == {
'statusCode': 200,
'body': json.dumps("Successfully processed extract.")
}
| 40.935829 | 135 | 0.626061 | 1,883 | 15,310 | 4.908125 | 0.096654 | 0.02467 | 0.028132 | 0.041549 | 0.8905 | 0.874811 | 0.871673 | 0.861285 | 0.85501 | 0.85501 | 0 | 0.015177 | 0.255454 | 15,310 | 373 | 136 | 41.045576 | 0.795596 | 0.104507 | 0 | 0.708333 | 0 | 0.004167 | 0.216359 | 0.047274 | 0 | 0 | 0 | 0 | 0.083333 | 1 | 0.05 | false | 0 | 0.054167 | 0 | 0.108333 | 0.004167 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0cc049c75f6ed30fb130d7a8be585c683429645c | 4,615 | py | Python | python/tests/test_bc.py | Sohojoe/ActiveRagdollAssaultCourse | 01b8111fee5b4b33f4d7735cf83b804a1080d108 | [
"Apache-2.0"
] | 33 | 2018-07-08T17:04:54.000Z | 2021-11-25T23:19:44.000Z | python/tests/test_bc.py | Sohojoe/ActiveRagdollControllers | 5d3a307ca378844c0974e190468c9492fd54197a | [
"Apache-2.0"
] | 1 | 2018-07-11T03:21:12.000Z | 2018-07-11T03:21:12.000Z | python/tests/test_bc.py | Sohojoe/ActiveRagdollAssaultCourse | 01b8111fee5b4b33f4d7735cf83b804a1080d108 | [
"Apache-2.0"
] | 5 | 2018-07-08T17:05:10.000Z | 2021-03-02T13:24:23.000Z | import unittest.mock as mock
import pytest
import numpy as np
import tensorflow as tf
from unitytrainers.bc.models import BehavioralCloningModel
from unityagents import UnityEnvironment
from .mock_communicator import MockCommunicator
@mock.patch('unityagents.UnityEnvironment.executable_launcher')
@mock.patch('unityagents.UnityEnvironment.get_communicator')
def test_cc_bc_model(mock_communicator, mock_launcher):
tf.reset_default_graph()
with tf.Session() as sess:
with tf.variable_scope("FakeGraphScope"):
mock_communicator.return_value = MockCommunicator(
discrete_action=False, visual_inputs=0)
env = UnityEnvironment(' ')
model = BehavioralCloningModel(env.brains["RealFakeBrain"])
init = tf.global_variables_initializer()
sess.run(init)
run_list = [model.sample_action, model.policy]
feed_dict = {model.batch_size: 2,
model.sequence_length: 1,
model.vector_in: np.array([[1, 2, 3, 1, 2, 3],
[3, 4, 5, 3, 4, 5]])}
sess.run(run_list, feed_dict=feed_dict)
env.close()
@mock.patch('unityagents.UnityEnvironment.executable_launcher')
@mock.patch('unityagents.UnityEnvironment.get_communicator')
def test_dc_bc_model(mock_communicator, mock_launcher):
tf.reset_default_graph()
with tf.Session() as sess:
with tf.variable_scope("FakeGraphScope"):
mock_communicator.return_value = MockCommunicator(
discrete_action=True, visual_inputs=0)
env = UnityEnvironment(' ')
model = BehavioralCloningModel(env.brains["RealFakeBrain"])
init = tf.global_variables_initializer()
sess.run(init)
run_list = [model.sample_action, model.action_probs]
feed_dict = {model.batch_size: 2,
model.dropout_rate: 1.0,
model.sequence_length: 1,
model.vector_in: np.array([[1, 2, 3, 1, 2, 3],
[3, 4, 5, 3, 4, 5]])}
sess.run(run_list, feed_dict=feed_dict)
env.close()
@mock.patch('unityagents.UnityEnvironment.executable_launcher')
@mock.patch('unityagents.UnityEnvironment.get_communicator')
def test_visual_dc_bc_model(mock_communicator, mock_launcher):
tf.reset_default_graph()
with tf.Session() as sess:
with tf.variable_scope("FakeGraphScope"):
mock_communicator.return_value = MockCommunicator(
discrete_action=True, visual_inputs=2)
env = UnityEnvironment(' ')
model = BehavioralCloningModel(env.brains["RealFakeBrain"])
init = tf.global_variables_initializer()
sess.run(init)
run_list = [model.sample_action, model.action_probs]
feed_dict = {model.batch_size: 2,
model.dropout_rate: 1.0,
model.sequence_length: 1,
model.vector_in: np.array([[1, 2, 3, 1, 2, 3],
[3, 4, 5, 3, 4, 5]]),
model.visual_in[0]: np.ones([2, 40, 30, 3]),
model.visual_in[1]: np.ones([2, 40, 30, 3])}
sess.run(run_list, feed_dict=feed_dict)
env.close()
@mock.patch('unityagents.UnityEnvironment.executable_launcher')
@mock.patch('unityagents.UnityEnvironment.get_communicator')
def test_visual_cc_bc_model(mock_communicator, mock_launcher):
tf.reset_default_graph()
with tf.Session() as sess:
with tf.variable_scope("FakeGraphScope"):
mock_communicator.return_value = MockCommunicator(
discrete_action=False, visual_inputs=2)
env = UnityEnvironment(' ')
model = BehavioralCloningModel(env.brains["RealFakeBrain"])
init = tf.global_variables_initializer()
sess.run(init)
run_list = [model.sample_action, model.policy]
feed_dict = {model.batch_size: 2,
model.sequence_length: 1,
model.vector_in: np.array([[1, 2, 3, 1, 2, 3],
[3, 4, 5, 3, 4, 5]]),
model.visual_in[0]: np.ones([2, 40, 30, 3]),
model.visual_in[1]: np.ones([2, 40, 30, 3])}
sess.run(run_list, feed_dict=feed_dict)
env.close()
if __name__ == '__main__':
pytest.main()
| 42.731481 | 72 | 0.588949 | 509 | 4,615 | 5.10609 | 0.157171 | 0.036937 | 0.061562 | 0.110812 | 0.914198 | 0.914198 | 0.914198 | 0.914198 | 0.914198 | 0.914198 | 0 | 0.02866 | 0.304442 | 4,615 | 107 | 73 | 43.130841 | 0.780997 | 0 | 0 | 0.813187 | 0 | 0 | 0.106609 | 0.080607 | 0 | 0 | 0 | 0 | 0 | 1 | 0.043956 | false | 0 | 0.076923 | 0 | 0.120879 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0cfa5730ebf0d5b9abd28b889e9512a691cbd074 | 159 | py | Python | devito/ir/__init__.py | BrunoMot/devito | b6e077857765b7b5fad812ec5774635ca4c6fbb7 | [
"MIT"
] | 204 | 2020-01-09T11:27:58.000Z | 2022-03-20T22:53:37.000Z | devito/ir/__init__.py | BrunoMot/devito | b6e077857765b7b5fad812ec5774635ca4c6fbb7 | [
"MIT"
] | 949 | 2016-04-25T11:41:34.000Z | 2019-12-27T10:43:40.000Z | devito/ir/__init__.py | BrunoMot/devito | b6e077857765b7b5fad812ec5774635ca4c6fbb7 | [
"MIT"
] | 131 | 2020-01-08T17:43:13.000Z | 2022-03-27T11:36:47.000Z | from devito.ir.support import * # noqa
from devito.ir.equations import * # noqa
from devito.ir.clusters import * # noqa
from devito.ir.iet import * # noqa
| 31.8 | 41 | 0.72327 | 24 | 159 | 4.791667 | 0.375 | 0.347826 | 0.417391 | 0.521739 | 0.573913 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.176101 | 159 | 4 | 42 | 39.75 | 0.877863 | 0.119497 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
0b6e49a2bbcf8e3c0587d88c98c4f5e565e2b358 | 10,674 | py | Python | app.py | ycp217/algoviz | b80ee8ffe6d99526feb6a8f62ccdecd69442c450 | [
"MIT"
] | null | null | null | app.py | ycp217/algoviz | b80ee8ffe6d99526feb6a8f62ccdecd69442c450 | [
"MIT"
] | null | null | null | app.py | ycp217/algoviz | b80ee8ffe6d99526feb6a8f62ccdecd69442c450 | [
"MIT"
] | null | null | null | from flask import Flask, render_template, request, jsonify, Response
import json, datetime
from bson import json_util
from bson.objectid import ObjectId
from pymongo import Connection
from flask_mime import Mime
from cors import crossdomain
app = Flask(__name__)
mimetype = Mime(app)
connection = Connection('localhost', 27017)
db = connection.foo
def toJson(data):
return json.dumps(data, default=json_util.default)
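# json_util handles the Mongo-specific types the stdlib encoder rejects; e.g.
# an ObjectId is emitted in extended-JSON form (illustrative value):
#
#   toJson({'_id': ObjectId('5e032e118ee92e9586007425')})
#   # -> '{"_id": {"$oid": "5e032e118ee92e9586007425"}}'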
@app.route('/api/login', methods=['POST', 'OPTIONS'])
@crossdomain(origin='*', headers='Content-Type')
def login():
if request.method == 'POST':
js = request.json
netid = js['netid']
pw = js['password']
res = db['people'].find_one({"data.attributes.netid": netid, "data.attributes.password": pw})
if res == None:
return Response(json.dumps({'error': 'invalid login'}), status=400, mimetype='application/json')
else:
return Response(toJson(res), status=200, mimetype='application/json')
else:
return Response(json.dumps({"error": "NOT FOUND"}), status=404, mimetype='application/json')
@app.route('/api/people', methods=['GET', 'POST', 'OPTIONS'])
@crossdomain(origin='*', headers='Content-Type')
def people():
if request.method == 'GET':
results = db['people'].find()
json_results = []
for result in results:
json_results.append(result)
return Response(toJson(json_results), status=200, mimetype='application/json')
if request.method == 'POST':
js = request.json
netid = js['data']['attributes']['netid']
res = db['people'].find_one({"data.attributes.netid": netid })
if res == None:
res = db['people'].insert(js)
return Response(toJson(res), status=200, mimetype='application/json')
else:
return Response(json.dumps({"error": "person already exists"}), status=400, mimetype='application/json')
else:
return Response(json.dumps({"error": "NOT FOUND"}), status=404, mimetype='application/json')
@app.route('/api/people/<person_id>', methods=['GET', 'PATCH', 'OPTIONS'])
@crossdomain(origin='*', headers='Content-Type')
def person(person_id):
if request.method == 'GET':
result = db['people'].find_one({'_id': ObjectId(person_id)})
return Response(toJson(result), status=200, mimetype='application/json')
if request.method == 'PATCH':
js = request.json
result = db['people'].find_one({'_id': ObjectId(person_id)})
if result == None:
return Response(json.dumps({"error": "person_id doesn't exist"}), status=400, mimetype='application/json')
else:
result = toJson(result)
# add attributes
try:
attributes = js['data']['attributes']
name = attributes.get('name', None)
nNumber = attributes.get('nNumber', None)
netid = attributes.get('netid', None)
password = attributes.get('password', None)
if name != None: db.people.update({"_id": ObjectId(person_id)}, {"$set": { "data.attributes.name": name }})
if nNumber != None: db.people.update({"_id": ObjectId(person_id)}, {"$set": { "data.attributes.nNumber": nNumber }})
if netid != None: db.people.update({"_id": ObjectId(person_id)}, {"$set": { "data.attributes.netid": netid }})
if password != None: db.people.update({"_id": ObjectId(person_id)}, {"$set": { "data.attributes.password": password }})
return Response(json.dumps({}), status=400, mimetype='application/json')
except KeyError:
return Response(json.dumps({"error": "json format not accurate"}), status=400, mimetype='application/json')
# add links
else:
return Response(json.dumps({"error": "NOT FOUND"}), status=404, mimetype='application/json')
@app.route('/api/sell_posts', methods=['GET', 'POST', 'OPTIONS'])
@crossdomain(origin='*', headers='Content-Type')
def sell_posts():
if request.method == 'GET':
results = db['sell_posts'].find()
json_results = []
for result in results:
json_results.append(result)
return Response(toJson(json_results), status=200, mimetype='application/json')
if request.method == 'POST':
js = request.json
seller_id = js['links']['seller']['_id']
res = db['people'].find_one({"_id": ObjectId(seller_id)})
if res == None:
return Response(json.dumps({"error": "seller_id does not exist"}), status=404, mimetype='application/json')
else:
offset = js['data']['attributes']['days_until_expiration']
now = datetime.datetime.now()
diff = datetime.timedelta(days=offset)
expired_by = now + diff
js['data']['attributes']['expired_by'] = expired_by
js['data']['attributes']['fulfilled'] = False
if js['data']['attributes']['price'] != None and js['data']['attributes']['expired_by'] != None and len(js['data']['attributes']['locations']) > 0 and js['data']['type'] == 'sell_posts':
res = db['sell_posts'].insert(js)
post_id = json.loads(toJson(res))['$oid']
if post_id != None:
db.people.update({"_id": ObjectId(seller_id)}, {"$push": { "links.sell_posts": { "_id": post_id } }})
return Response(toJson(res), status=200, mimetype='application/json')
else:
return Response(json.dumps({"error": "missing required info"}), status=404, mimetype='application/json')
else:
return Response(json.dumps({"error": "NOT FOUND"}), status=404, mimetype='application/json')
@app.route('/api/sell_posts/<post_id>', methods=['GET', 'DELETE'])
@crossdomain(origin='*', headers='Content-Type')
def sell_post(post_id):
if request.method == 'GET':
result = db['sell_posts'].find_one({'_id': ObjectId(post_id)})
return Response(toJson(result), status=200, mimetype='application/json')
if request.method == 'DELETE':
result = db['sell_posts'].remove({'_id': ObjectId(post_id)})
return Response(toJson(result), status=200, mimetype='application/json')
else:
return Response(json.dumps({"error": "NOT FOUND"}), status=404, mimetype='application/json')
@app.route('/api/sell_posts/nearest/<post_id>', methods=['GET'])
@crossdomain(origin='*', headers='Content-Type')
def find_best_buy(post_id):
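    # Given a sell post, collect every buy post that shares at least one of its
    # locations and whose expiry is no earlier than the sell post's; used_map
    # de-duplicates posts matched through multiple shared locations.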
result = db['sell_posts'].find_one({'_id': ObjectId(post_id)})
js = toJson(result)
us = json_util.loads(js)
price = us['data']['attributes']['price']
date = us['data']['attributes']['expired_by']
locations = us['data']['attributes']['locations']
seller = us['links']['seller']['_id']
used_map = {}
for location in locations:
res = db.buy_posts.find( {"data.attributes.locations": location, "data.attributes.expired_by": {"$gte": date }})
for i in range(0, res.count()):
used_map[res[i]['_id']] = res[i]
final_ret = []
for key in used_map.keys():
final_ret.append(used_map[key])
return Response(toJson(final_ret), status=200, mimetype='application/json')
@app.route('/api/buy_posts', methods=['GET', 'POST', 'OPTIONS'])
@crossdomain(origin='*', headers='Content-Type')
def buy_posts():
if request.method == 'GET':
results = db['buy_posts'].find()
json_results = []
for result in results:
json_results.append(result)
return toJson(json_results)
if request.method == 'POST':
js = request.json
buyer_id = js['links']['buyer']['_id']
res = db['people'].find_one({"_id": ObjectId(buyer_id)})
if res == None:
return Response(json.dumps({"error": "buyer_id does not exist"}), status=404, mimetype='application/json')
else:
offset = js['data']['attributes']['days_until_expiration']
now = datetime.datetime.now()
diff = datetime.timedelta(days=offset)
expired_by = now + diff
js['data']['attributes']['expired_by'] = expired_by
js['data']['attributes']['fulfilled'] = False
            print(js['data']['type'])
if js['data']['attributes']['price'] != None and js['data']['attributes']['expired_by'] != None and len(js['data']['attributes']['locations']) > 0 and js['data']['type'] == 'buy_posts':
res = db['buy_posts'].insert(js)
post_id = json.loads(toJson(res))['$oid']
if post_id != None:
db.people.update({"_id": ObjectId(buyer_id)}, {"$push": { "links.buy_posts": { "_id": post_id } }})
return Response(toJson(res), status=200, mimetype='application/json')
else:
return Response(json.dumps({"error": "missing required info"}), status=404, mimetype='application/json')
else:
return Response(json.dumps({"error": "NOT FOUND"}), status=404, mimetype='application/json')
@app.route('/api/buy_posts/<post_id>', methods=['GET', 'DELETE'])
@crossdomain(origin='*', headers='Content-Type')
def buy_post(post_id):
if request.method == 'GET':
result = db['buy_posts'].find_one({'_id': ObjectId(post_id)})
return Response(toJson(result), status=200, mimetype='application/json')
if request.method == 'DELETE':
result = db['buy_posts'].remove({'_id': ObjectId(post_id)})
return Response(toJson(result), status=200, mimetype='application/json')
else:
return Response(json.dumps({"error": "NOT FOUND"}), status=404, mimetype='application/json')
@app.route('/api/buy_posts/nearest/<post_id>', methods=['GET'])
@crossdomain(origin='*', headers='Content-Type')
def find_best_sell(post_id):
result = db['buy_posts'].find_one({'_id': ObjectId(post_id)})
js = toJson(result)
us = json_util.loads(js)
price = us['data']['attributes']['price']
date = us['data']['attributes']['expired_by']
locations = us['data']['attributes']['locations']
buyer = us['links']['buyer']['_id']
used_map = {}
for location in locations:
res = db.sell_posts.find( {"data.attributes.locations": location, "data.attributes.expired_by": {"$gte": date }})
for i in range(0, res.count()):
used_map[res[i]['_id']] = res[i]
final_ret = []
for key in used_map.keys():
final_ret.append(used_map[key])
return Response(toJson(final_ret), status=200, mimetype='application/json')
if __name__ == '__main__':
app.debug = True
app.run()
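# Usage sketch (not part of the original file; host/port assume Flask's default
# development server, and the credentials are placeholders):
#
#   import requests
#   requests.post('http://localhost:5000/api/login',
#                 json={'netid': 'abc123', 'password': 'secret'})
#   requests.get('http://localhost:5000/api/people')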
| 47.022026 | 198 | 0.608113 | 1,275 | 10,674 | 4.967059 | 0.108235 | 0.06853 | 0.105321 | 0.058108 | 0.820622 | 0.803884 | 0.786673 | 0.75904 | 0.72667 | 0.661614 | 0 | 0.011422 | 0.212573 | 10,674 | 226 | 199 | 47.230089 | 0.742058 | 0.002248 | 0 | 0.596059 | 0 | 0 | 0.22194 | 0.038978 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.019704 | 0.034483 | null | null | 0.004926 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0b6e4f0b71e24bdefb947613fc8b5ee5296cac6f | 78 | py | Python | async_test_suite/__init__.py | yuvalez/async_test_suite | 2dc6db8e7d3beb05e658e51487a753176f27e6ac | [
"MIT"
] | null | null | null | async_test_suite/__init__.py | yuvalez/async_test_suite | 2dc6db8e7d3beb05e658e51487a753176f27e6ac | [
"MIT"
] | null | null | null | async_test_suite/__init__.py | yuvalez/async_test_suite | 2dc6db8e7d3beb05e658e51487a753176f27e6ac | [
"MIT"
] | null | null | null | from .async_suite import AsyncTestSuite
from .async_suite import AsyncTestCase | 39 | 39 | 0.884615 | 10 | 78 | 6.7 | 0.6 | 0.268657 | 0.41791 | 0.597015 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.089744 | 78 | 2 | 40 | 39 | 0.943662 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
0b7999cbb9b40818d8ceaa139f1d059a5cf88ae7 | 39 | py | Python | lib/gameoflife/__init__.py | ideasculptor/gameoflife | f83c24cb00b575a9b1a67bc307fec1b888791e60 | [
"MIT"
] | null | null | null | lib/gameoflife/__init__.py | ideasculptor/gameoflife | f83c24cb00b575a9b1a67bc307fec1b888791e60 | [
"MIT"
] | null | null | null | lib/gameoflife/__init__.py | ideasculptor/gameoflife | f83c24cb00b575a9b1a67bc307fec1b888791e60 | [
"MIT"
] | null | null | null | from . import Game
from . import Board
| 13 | 19 | 0.74359 | 6 | 39 | 4.833333 | 0.666667 | 0.689655 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.205128 | 39 | 2 | 20 | 19.5 | 0.935484 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
f001520bb6c761f99ea963115d08952a6cb38dd8 | 48,299 | py | Python | preprocessing.py | wonderseen/OVC-MMT | b982038ea1295cc038b8dcbca11aa81d318f7a49 | [
"MIT"
] | 5 | 2021-02-25T03:12:01.000Z | 2022-03-04T15:17:45.000Z | preprocessing.py | wonderseen/OVC-MMT | b982038ea1295cc038b8dcbca11aa81d318f7a49 | [
"MIT"
] | 1 | 2021-02-25T05:42:31.000Z | 2022-01-02T17:54:16.000Z | preprocessing.py | wonderseen/OVC-MMT | b982038ea1295cc038b8dcbca11aa81d318f7a49 | [
"MIT"
] | null | null | null | """
This module contains all the functions implemented to load and preprocess
the dataset for OVC.
"""
import torch
from torch.autograd import Variable
import torch.nn as nn
import math
import numpy as np
from nltk.tokenize import sent_tokenize
import re
from machine_translation_vision.samplers import BucketBatchSampler
from collections import Counter
import unicodedata
import random
## Define a couple of parameters
SOS_token = 2
EOS_token = 3
UNK_token = 1
PAD_Token = 0
use_cuda = torch.cuda.is_available()
# Load the dataset from a text file located at data_path
def load_data(data_path):
with open(data_path,'r') as f:
data = [line.strip() for line in f.readlines()]
return data
def quick_sort(lists):
if not lists:
return []
assert isinstance(lists, list)
if len(lists) == 1:
return lists
pivot = lists.pop()
llist, rlist = [], []
for x in lists:
if x > pivot:
rlist.append(x)
else:
llist.append(x)
return quick_sort(llist) + [pivot] + quick_sort(rlist)
def heap_struct(alist):
alist.reverse()
for i in range(int((len(alist)-1)/2)):
if len(alist) < 2*i+1:
break
if alist[i] < alist[2*i+1]:
alist[i], alist[2*i+1] = alist[2*i+1], alist[i]
if alist[2*i+1] < alist[2*i+2]:
alist[2*i+2], alist[2*i+1] = alist[2*i+1], alist[2*i+2]
return alist
def heap_topk(s, k):
topk = quick_sort(s[:k])
for x in s[k:]:
if x<topk[-1]:
topk[-1] = x
topk = heap_struct(topk)
topk.reverse()
return topk
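# Note on the three helpers above: quick_sort returns its input in ascending
# order but also mutates it (the pivot is pop()ed off the caller's list), which
# is why heap_topk hands it a slice copy s[:k]. heap_topk's only visible use is
# the commented-out VI filtering inside load_VI below.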
def load_VI(data_path, target_sentence):
with open(data_path,'r') as f:
data = f.readlines()
VIS = [[float(v) for v in vi.split()] for vi in data]
new_VIS = []
try:
for id, (VI, sent) in enumerate(zip(VIS, target_sentence)):
pos = -1
new_vi = []
tokens = sent.split()
if tokens[-1] == 't.' and tokens[-2] in ['bar@@', 'dar@@']:## noisy case of the bpe tokenizer, bart. -> bar@@ t.
VI = VI[:-1]
##
for i, vi in enumerate(VI):
                # count how many BPE pieces make up the current word,
                # starting at its first sub-token
                temp_pos, bpe_length = pos + 1, 1
while tokens[temp_pos].endswith('@@'):
temp_pos += 1
bpe_length += 1
vi /= float(bpe_length)
##
pos += 1
new_vi.append(vi)
while tokens[pos].endswith('@@'):
pos += 1
new_vi.append(vi)
if pos == len(tokens)-1: break
assert pos == len(tokens)-1, (pos, len(tokens)-1)
assert len(tokens) == len(new_vi), (len(tokens), len(new_vi))
assert i==len(VI)-1, (i, len(VI)-1)
## top k
# top_k = heap_topk(new_vi, 5)
# top_k = list(Counter(top_k).keys())
# if 0. in top_k: top_k.remove(0.)
# if top_k is not None:
# new_vi = [vi if vi in top_k else 0. for vi in new_vi]
# print(id)
# print(new_vi)
# print(tokens)
new_VIS.append(new_vi)
except BaseException:
print(id, len(VI), VI)
print(len(tokens), sent)
assert len(target_sentence) == len(new_VIS), (len(target_sentence), len(new_VIS), len(VIS))
return new_VIS
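# Worked example (illustrative numbers): for the BPE target 'bar@@ t .' whose
# VI file line is '0.8 0.2' (one score per de-BPE'd word, here 'bart' and '.'),
# the loop above divides 0.8 over the two sub-tokens of 'bart', yielding
# new_vi = [0.4, 0.4, 0.2], token-aligned with the BPE target sequence.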
def format_data(data_x, data_y, IKEA=False):
if not IKEA:
data=[[x.strip(), y.strip()] for x, y in zip(data_x, data_y)]
else:
data=[]
for x, y in zip(data_x, data_y):
            ## convert the paragraphs into sentences
x_s = sent_tokenize(x)
y_s = sent_tokenize(y)
            ## Check that both sides split into the same number of sentences
if len(x_s) == len(y_s):
data += [[x.strip(), y.strip()] for x, y in zip(x_s, y_s)]
return data
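# For the IKEA corpus a document-level pair only contributes sentence pairs
# when both sides tokenize into the same number of sentences; mismatched
# paragraphs are silently dropped.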
# Construct word2id and id2word dictionaries from a loaded vocab file
def construct_vocab_dic(vocab):
word2id = {}
id2word = {}
for i,word in enumerate(vocab):
word2id[word.strip()] = i + 1
id2word[i + 1] = word.strip()
return word2id,id2word
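# Example (illustrative vocabulary):
#   construct_vocab_dic(['the', 'cat']) -> ({'the': 1, 'cat': 2},
#                                           {1: 'the', 2: 'cat'})
# Ids start at 1 because index 0 is reserved for PAD_Token.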
def _run_strip_accents(text):
"""Strips accents from a piece of text."""
text = unicodedata.normalize("NFD", text)
output = []
for char in text:
cat = unicodedata.category(char)
if cat == "Mn":
continue
output.append(char)
return "".join(output)
# Filter out the pairs in which either sentence exceeds max_length
def data_filter(data, max_length):
new_data = []
for d in data:
if len(d[0].split()) <= max_length and len(d[1].split()) <= max_length:
new_data.append(d)
return new_data
missing_words = []
def indexes_from_sentence(vocab, sentence, drop_unk=False):
words = sentence.split(' ')
for i, word in enumerate(words):
if word not in vocab.keys():
if _run_strip_accents(word) in vocab.keys():
words[i] = _run_strip_accents(word)
else:
if word not in missing_words:
missing_words.append(word)
indexes = [vocab.get(word, UNK_token) for word in words]
if drop_unk: indexes = [i for i in indexes if i != UNK_token]
return indexes
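# Example of the accent fallback (hypothetical vocab): with vocab = {'cafe': 7},
# the token 'café' is accent-stripped to 'cafe' and mapped to 7; a token found
# in neither form maps to UNK_token (1), and is dropped when drop_unk is True.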
def variable_from_sentence(vocab, sentence):
    indexes = indexes_from_sentence(vocab, sentence)
indexes.append(EOS_token)
var = Variable(torch.LongTensor(indexes).view(-1, 1))
if use_cuda:
var = var.cuda()
return var
def variables_from_pair(pair,s_vocab, t_vocab):
input_variable = variable_from_sentence(s_vocab, pair[0])
target_variable = variable_from_sentence(t_vocab, pair[1])
return (input_variable, target_variable)
# Create data pairs with each pair represented by corresponding wordids in each language.
def create_data_index(pairs, source_vocab, target_vocab, drop_unk=False):
source_indexes = [indexes_from_sentence(source_vocab, x[0], drop_unk=drop_unk) + [EOS_token] for x in pairs]
target_indexes = [indexes_from_sentence(target_vocab, x[1], drop_unk=drop_unk) + [EOS_token] for x in pairs]
return [[s, t] for s, t in zip(source_indexes, target_indexes)]
def create_data_index_VI(pairs, source_vocab, target_vocab, drop_unk=False):
source_indexes = [indexes_from_sentence(source_vocab, x[0], drop_unk=drop_unk) + [EOS_token] for x in pairs]
target_indexes = [indexes_from_sentence(target_vocab, x[1], drop_unk=drop_unk) + [EOS_token] for x in pairs]
vis = [x[2] + [0.] for x in pairs]
return [[s, t, vi] for s, t, vi in zip(source_indexes, target_indexes, vis)]
# Pad a sequence up to max_length with the PAD symbol (index 0)
def pad_seq(seq, max_length):
seq_new = seq + [0 for i in range(max_length - len(seq))]
return seq_new
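# Illustrative sketch (hypothetical ids): pad_seq([4, 17, 2], 5) -> [4, 17, 2, 0, 0]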
def data_generator(data_pairs, batch_size):
"""
Input:
        data_pairs: List of pairs, [[data_1,target_1],[data_2,target_2],...], where data_1 and target_1 are id indexes from 1 to their own vocabulary size. Each instance should end with an EOS_token index.
        batch_size: The size of the batch
    Output:
        batch_x: Variable with size: B*Lx
        batch_y: Variable with size: B*Ly
        batch_x_lengths: A list which contains the length of each source language sentence in the batch
        batch_y_lengths: A list which contains the length of each target language sentence in the batch
        x_reverse_sorted_index: A list of indices representing the batch sorted by descending instance length.
"""
data_size = len(data_pairs)
num_batches = math.floor(data_size / batch_size)
for i in range(0, data_size, batch_size):
if i+batch_size <= data_size:
batch_data_x = [d[0] for d in data_pairs[i:i+batch_size]]
batch_data_y = [d[1] for d in data_pairs[i:i+batch_size]]
else:
batch_data_x = [d[0] for d in data_pairs[i:data_size]]
batch_data_y = [d[1] for d in data_pairs[i:data_size]]
#The lengths for data and labels to be padded to
x_length = max([len(x) for x in batch_data_x])
y_length = max([len(y) for y in batch_data_y])
#Get a list of tokens
batch_x_pad = []
batch_x_lengths = []
batch_y_pad = []
batch_y_lengths = []
        # Update batch_x_lengths and batch_x_pad
for x_tokens in batch_data_x:
x_l = len(x_tokens)
x_pad_seq = pad_seq(x_tokens,x_length)
batch_x_lengths.append(x_l)
batch_x_pad.append(x_pad_seq)
# Reorder the lengths
x_sorted_index = list(np.argsort(batch_x_lengths))
x_reverse_sorted_index = [x for x in reversed(x_sorted_index)]
batch_x_pad_sorted = [batch_x_pad[i] for i in x_reverse_sorted_index]
for y_tokens in batch_data_y:
y_l = len(y_tokens)
y_pad_seq = pad_seq(y_tokens,y_length)
batch_y_lengths.append(y_l)
batch_y_pad.append(y_pad_seq)
# Reorder the lengths
batch_y_pad_sorted =[batch_y_pad[i] for i in x_reverse_sorted_index]
batch_y_lengths_sorted = [batch_y_lengths[i] for i in x_reverse_sorted_index]
# Generate batch_x and batch_y
batch_x = Variable(torch.LongTensor(batch_x_pad_sorted))
batch_y = Variable(torch.LongTensor(batch_y_pad_sorted))
if use_cuda:
batch_x = batch_x.cuda()
batch_y = batch_y.cuda()
        # Yield the batch data
yield batch_x, \
batch_y, \
list(reversed(sorted(batch_x_lengths))), \
batch_y_lengths_sorted, \
x_reverse_sorted_index
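# Minimal usage sketch (illustrative only; `train_pairs` is a hypothetical list
# produced by create_data_index, and the batch size of 32 is arbitrary):
#   for batch_x, batch_y, x_lens, y_lens, rev_idx in data_generator(train_pairs, 32):
#       pass  # batch_x/batch_y are (B, Lx)/(B, Ly) LongTensors, sorted by source length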
def data_generator_tl(data_pairs, batch_size):
"""
    This is an implementation that generates batches in which all target sentences
    share the same length. We borrow the bucket sampler from nmtpytorch to generate
    the batch indices, so that we can fetch the corresponding data pairs.
    Input:
        data_pairs: List of pairs, [[data_1,target_1],[data_2,target_2],...], where data_1 and target_1 are id indexes from 1 to their own vocabulary size. Each instance should end with an EOS_token index.
        batch_size: The size of the batch
    Output:
        batch_x: Variable with size: B*Lx
        batch_y: Variable with size: B*Ly
        batch_x_lengths: A list which contains the length of each source language sentence in the batch
        batch_y_lengths: A list which contains the length of each target language sentence in the batch
        x_reverse_sorted_index: A list of indices representing the batch sorted by descending instance length.
"""
# Get the lengths of the target language
tl_lengths = [len(x[1]) for x in data_pairs]
# Initialize the index sampler
data_sampler = BucketBatchSampler(tl_lengths,batch_size)
# Iterate through the index sampler
for bidx in data_sampler.__iter__():
batch_data_x = [d[0] for d in [data_pairs[y] for y in bidx]]
batch_data_y = [d[1] for d in [data_pairs[y] for y in bidx]]
#The lengths for data and labels to be padded to
x_length = max([len(x) for x in batch_data_x])
y_length = max([len(y) for y in batch_data_y])
# Get a list of tokens
batch_x_pad = []
batch_x_lengths = []
batch_y_pad = []
batch_y_lengths = []
        # Update batch_x_lengths and batch_x_pad
for x_tokens in batch_data_x:
x_l = len(x_tokens)
x_pad_seq = pad_seq(x_tokens,x_length)
batch_x_lengths.append(x_l)
batch_x_pad.append(x_pad_seq)
# Reorder the lengths
x_sorted_index = list(np.argsort(batch_x_lengths))
x_reverse_sorted_index = [x for x in reversed(x_sorted_index)]
batch_x_pad_sorted = [batch_x_pad[i] for i in x_reverse_sorted_index]
for y_tokens in batch_data_y:
y_l = len(y_tokens)
y_pad_seq = pad_seq(y_tokens,y_length)
batch_y_lengths.append(y_l)
batch_y_pad.append(y_pad_seq)
# Reorder the lengths
batch_y_pad_sorted =[batch_y_pad[i] for i in x_reverse_sorted_index]
batch_y_lengths_sorted = [batch_y_lengths[i] for i in x_reverse_sorted_index]
# Generate batch_x and batch_y
batch_x, batch_y = Variable(torch.LongTensor(batch_x_pad_sorted)), Variable(torch.LongTensor(batch_y_pad_sorted))
if use_cuda:
batch_x = batch_x.cuda()
batch_y = batch_y.cuda()
        # Yield the batch data
yield batch_x, batch_y, list(reversed(sorted(batch_x_lengths))), batch_y_lengths_sorted
def data_generator_single(batch_data_x):
x_length = max([len(x) for x in batch_data_x])
# Get a list of tokens
batch_x_pad = []
batch_x_lengths = []
    # Update batch_x_lengths and batch_x_pad
for x_tokens in batch_data_x:
x_l = len(x_tokens)
x_pad_seq = pad_seq(x_tokens,x_length)
batch_x_lengths.append(x_l)
batch_x_pad.append(x_pad_seq)
# Reorder the lengths
x_sorted_index = list(np.argsort(batch_x_lengths))
x_reverse_sorted_index = list(reversed(x_sorted_index))
batch_x_pad_sorted = [batch_x_pad[i] for i in x_reverse_sorted_index]
batch_x = Variable(torch.LongTensor(batch_x_pad_sorted))
if use_cuda:
batch_x = batch_x.cuda()
return batch_x,list(reversed(sorted(batch_x_lengths))),x_reverse_sorted_index
def data_generator_mtv(data_pairs, data_im, batch_size):
"""
Input:
        data_pairs: List of pairs, [[data_1,target_1],[data_2,target_2],...], where data_1 and target_1 are id indexes from 1 to their own vocabulary size. Each instance should end with an EOS_token index.
        batch_size: The size of the batch
        data_im: The numpy matrix which contains the image features. Size: (N,I), N is the number of samples and I is the image feature size
    Output:
        batch_x: Variable with size: B*Lx
        batch_y: Variable with size: B*Ly
        batch_x_lengths: A list which contains the length of each source language sentence in the batch
        batch_y_lengths: A list which contains the length of each target language sentence in the batch
        x_reverse_sorted_index: A list of indices representing the batch sorted by descending instance length.
"""
data_size = len(data_pairs)
num_batches = math.floor(data_size / batch_size)
for i in range(0, data_size, batch_size):
if i+batch_size <= data_size:
batch_data_x = [d[0] for d in data_pairs[i:i+batch_size]]
batch_data_y = [d[1] for d in data_pairs[i:i+batch_size]]
batch_data_im = torch.from_numpy(data_im[i:i+batch_size])
else:
batch_data_x = [d[0] for d in data_pairs[i:data_size]]
batch_data_y = [d[1] for d in data_pairs[i:data_size]]
batch_data_im = torch.from_numpy(data_im[i:data_size])
#The lengths for data and labels to be padded to
x_length = max([len(x) for x in batch_data_x])
y_length = max([len(y) for y in batch_data_y])
#Get a list of tokens
batch_x_pad = []
batch_x_lengths = []
batch_y_pad = []
batch_y_lengths = []
        # Update batch_x_lengths and batch_x_pad
for x_tokens in batch_data_x:
x_l = len(x_tokens)
x_pad_seq = pad_seq(x_tokens,x_length)
batch_x_lengths.append(x_l)
batch_x_pad.append(x_pad_seq)
#Reorder the lengths
x_sorted_index = list(np.argsort(batch_x_lengths))
x_reverse_sorted_index = [x for x in reversed(x_sorted_index)]
batch_x_pad_sorted = [batch_x_pad[i] for i in x_reverse_sorted_index]
#Pad data_y and reorder it with respect to the x_reverse_sorted_index
for y_tokens in batch_data_y:
y_l = len(y_tokens)
y_pad_seq = pad_seq(y_tokens,y_length)
batch_y_lengths.append(y_l)
batch_y_pad.append(y_pad_seq)
#Reorder the lengths
batch_y_pad_sorted =[batch_y_pad[i] for i in x_reverse_sorted_index]
batch_y_lengths_sorted = [batch_y_lengths[i] for i in x_reverse_sorted_index]
#Reorder the image numpy matrix with respect to the x_reverse_sorted_index
batch_im_sorted = torch.zeros_like(batch_data_im)
for i,x in enumerate(x_reverse_sorted_index):
batch_im_sorted[i] = batch_data_im[x]
#Generate batch_x and batch_y
batch_x, batch_y = Variable(torch.LongTensor(batch_x_pad_sorted)), Variable(torch.LongTensor(batch_y_pad_sorted))
batch_im = Variable(batch_im_sorted.float())
if use_cuda:
batch_x = batch_x.cuda()
batch_y = batch_y.cuda()
batch_im = batch_im.cuda()
        # Yield the batch data
yield batch_x, batch_y, batch_im, list(reversed(sorted(batch_x_lengths))), batch_y_lengths_sorted, x_reverse_sorted_index
def data_generator_bta_mtv(data_pairs, data_im, data_bta_im, batch_size):
"""
Input:
        data_pairs: List of pairs, [[data_1,target_1],[data_2,target_2],...], where data_1 and target_1 are id indexes from 1 to their own vocabulary size. Each instance should end with an EOS_token index.
        batch_size: The size of the batch
        data_im: The numpy matrix which contains the image features. Size: (N,I), N is the number of samples and I is the image feature size
        data_bta_im: The numpy matrix which contains the additional (bta) image features, aligned with data_im along the first dimension
    Output:
        batch_x: Variable with size: B*Lx
        batch_y: Variable with size: B*Ly
        batch_x_lengths: A list which contains the length of each source language sentence in the batch
        batch_y_lengths: A list which contains the length of each target language sentence in the batch
        x_reverse_sorted_index: A list of indices representing the batch sorted by descending instance length.
"""
data_size = len(data_pairs)
num_batches = math.floor(data_size / batch_size)
for i in range(0, data_size, batch_size):
if i+batch_size <= data_size:
batch_data_x = [d[0] for d in data_pairs[i:i+batch_size]]
batch_data_y = [d[1] for d in data_pairs[i:i+batch_size]]
batch_data_im = torch.from_numpy(data_im[i:i+batch_size])
batch_data_bta_im = torch.from_numpy(data_bta_im[i:i+batch_size])
else:
batch_data_x = [d[0] for d in data_pairs[i:data_size]]
batch_data_y = [d[1] for d in data_pairs[i:data_size]]
batch_data_im = torch.from_numpy(data_im[i:data_size])
            batch_data_bta_im = torch.from_numpy(data_bta_im[i:data_size])
#The lengths for data and labels to be padded to
x_length = max([len(x) for x in batch_data_x])
y_length = max([len(y) for y in batch_data_y])
#Get a list of tokens
batch_x_pad = []
batch_x_lengths = []
batch_y_pad = []
batch_y_lengths = []
        # Update batch_x_lengths and batch_x_pad
for x_tokens in batch_data_x:
x_l = len(x_tokens)
x_pad_seq = pad_seq(x_tokens,x_length)
batch_x_lengths.append(x_l)
batch_x_pad.append(x_pad_seq)
#Reorder the lengths
x_sorted_index = list(np.argsort(batch_x_lengths))
x_reverse_sorted_index = [x for x in reversed(x_sorted_index)]
batch_x_pad_sorted = [batch_x_pad[i] for i in x_reverse_sorted_index]
#Pad data_y and reorder it with respect to the x_reverse_sorted_index
for y_tokens in batch_data_y:
y_l = len(y_tokens)
y_pad_seq = pad_seq(y_tokens,y_length)
batch_y_lengths.append(y_l)
batch_y_pad.append(y_pad_seq)
#Reorder the lengths
batch_y_pad_sorted =[batch_y_pad[i] for i in x_reverse_sorted_index]
batch_y_lengths_sorted = [batch_y_lengths[i] for i in x_reverse_sorted_index]
#Reorder the image numpy matrix with respect to the x_reverse_sorted_index
batch_im_sorted = torch.zeros_like(batch_data_im)
batch_bta_im_sorted = torch.zeros_like(batch_data_bta_im)
for i,x in enumerate(x_reverse_sorted_index):
batch_im_sorted[i] = batch_data_im[x]
batch_bta_im_sorted[i] = batch_data_bta_im[x]
#Generate batch_x and batch_y
batch_x, batch_y = Variable(torch.LongTensor(batch_x_pad_sorted)), Variable(torch.LongTensor(batch_y_pad_sorted))
batch_im = Variable(batch_im_sorted.float())
batch_bta_im = Variable(batch_bta_im_sorted.float())
if use_cuda:
batch_x = batch_x.cuda()
batch_y = batch_y.cuda()
batch_im = batch_im.cuda()
batch_bta_im = batch_bta_im.cuda()
        # Yield the batch data
yield batch_x, batch_y, batch_im, batch_bta_im, list(reversed(sorted(batch_x_lengths))), batch_y_lengths_sorted,x_reverse_sorted_index
def data_generator_tl_mtv_bta_vi_shuffle(data_pairs, data_im, data_bta_im, batch_size):
"""
Input:
        data_pairs: List of pairs, [[data_1,target_1],[data_2,target_2],...], where data_1 and target_1 are id indexes from 1 to their own vocabulary size. Each instance should end with an EOS_token index.
        batch_size: The size of the batch
        data_im: The numpy matrix which contains the image features. Size: (N,I), N is the number of samples and I is the image feature size
        data_bta_im: The numpy matrix which contains the additional (bta) image features, aligned with data_im along the first dimension
    Output:
        batch_x: Variable with size: B*Lx
        batch_y: Variable with size: B*Ly
        batch_x_lengths: A list which contains the length of each source language sentence in the batch
        batch_y_lengths: A list which contains the length of each target language sentence in the batch
        x_reverse_sorted_index: A list of indices representing the batch sorted by descending instance length.
"""
dt = [(a,b,c) for a,b,c in zip(data_pairs, data_im, data_bta_im)]
random.shuffle(dt)
data_pairs = [a[0] for a in dt]
data_im = np.array([a[1] for a in dt])
data_bta_im = np.array([a[2] for a in dt])
data_size = len(data_pairs)
num_batches = math.floor(data_size/batch_size)
for i in range(0,data_size,batch_size):
if i+batch_size <= data_size:
batch_data_x = [d[0] for d in data_pairs[i:i+batch_size]]
batch_data_y = [d[1] for d in data_pairs[i:i+batch_size]]
batch_data_vi = [d[2] for d in data_pairs[i:i+batch_size]]
batch_data_im = torch.from_numpy(data_im[i:i+batch_size])
batch_data_bta_im = torch.from_numpy(data_bta_im[i:i+batch_size])
else:
batch_data_x = [d[0] for d in data_pairs[i:data_size]]
batch_data_y = [d[1] for d in data_pairs[i:data_size]]
batch_data_vi = [d[2] for d in data_pairs[i:data_size]]
batch_data_im = torch.from_numpy(data_im[i:data_size])
            batch_data_bta_im = torch.from_numpy(data_bta_im[i:data_size])
# The lengths for data and labels to be padded to
x_length = max([len(x) for x in batch_data_x])
y_length = max([len(y) for y in batch_data_y])
# Get a list of tokens
batch_x_pad = []
batch_x_lengths = []
batch_y_pad = []
batch_y_lengths = []
batch_vi_pad = []
        # Update batch_x_lengths and batch_x_pad
for x_tokens in batch_data_x:
x_l = len(x_tokens)
x_pad_seq = pad_seq(x_tokens, x_length)
batch_x_lengths.append(x_l)
batch_x_pad.append(x_pad_seq)
# Reorder the lengths
x_sorted_index = list(np.argsort(batch_x_lengths))
x_reverse_sorted_index = [x for x in reversed(x_sorted_index)]
batch_x_pad_sorted = [batch_x_pad[i] for i in x_reverse_sorted_index]
# Pad data_y and reorder it with respect to the x_reverse_sorted_index
for y_tokens, vi in zip(batch_data_y, batch_data_vi):
y_l = len(y_tokens)
y_pad_seq = pad_seq(y_tokens, y_length)
batch_y_lengths.append(y_l)
batch_y_pad.append(y_pad_seq)
vi_pad_seq = pad_seq(vi,y_length)
batch_vi_pad.append(vi_pad_seq)
# Reorder the lengths
batch_vi_pad_sorted =[batch_vi_pad[i] for i in x_reverse_sorted_index]
batch_y_pad_sorted =[batch_y_pad[i] for i in x_reverse_sorted_index]
batch_y_lengths_sorted = [batch_y_lengths[i] for i in x_reverse_sorted_index]
# Reorder the image numpy matrix with respect to the x_reverse_sorted_index
batch_im_sorted = torch.zeros_like(batch_data_im)
batch_bta_im_sorted = torch.zeros_like(batch_data_bta_im)
for i,x in enumerate(x_reverse_sorted_index):
batch_im_sorted[i] = batch_data_im[x]
batch_bta_im_sorted[i] = batch_data_bta_im[x]
# Generate batch_x and batch_y
batch_x, batch_y, batch_vi = Variable(torch.LongTensor(batch_x_pad_sorted)), Variable(torch.LongTensor(batch_y_pad_sorted)),Variable(torch.FloatTensor(batch_vi_pad_sorted))
batch_im = Variable(batch_im_sorted.float())
batch_bta_im = Variable(batch_bta_im_sorted.float())
if use_cuda:
batch_x = batch_x.cuda()
batch_y = batch_y.cuda()
batch_vi = batch_vi.cuda()
batch_im = batch_im.cuda()
batch_bta_im = batch_bta_im.cuda()
        # Yield the batch data
yield batch_x, batch_y, batch_vi, batch_im, batch_bta_im, list(reversed(sorted(batch_x_lengths))), batch_y_lengths_sorted
def data_generator_tl_mtv(data_pairs, data_im, batch_size):
"""
    This is an implementation that generates batches in which all target sentences
    share the same length. We borrow the bucket sampler from nmtpytorch to generate
    the batch indices, so that we can fetch the corresponding data pairs.
    Input:
        data_pairs: List of pairs, [[data_1,target_1],[data_2,target_2],...], where data_1 and target_1 are id indexes from 1 to their own vocabulary size. Each instance should end with an EOS_token index.
        batch_size: The size of the batch
        data_im: The numpy matrix which contains the image features. Size: (N,I), N is the number of samples and I is the image feature size
    Output:
        batch_x: Variable with size: B*Lx
        batch_y: Variable with size: B*Ly
        batch_x_lengths: A list which contains the length of each source language sentence in the batch
        batch_y_lengths: A list which contains the length of each target language sentence in the batch
        x_reverse_sorted_index: A list of indices representing the batch sorted by descending instance length.
"""
# Get the lengths of the target language
tl_lengths = [len(x[1]) for x in data_pairs]
# Initialize the index sampler
data_sampler = BucketBatchSampler(tl_lengths,batch_size)
# Iterate through the index sampler
for bidx in data_sampler.__iter__():
batch_data_x = [d[0] for d in [data_pairs[y] for y in bidx]]
batch_data_y = [d[1] for d in [data_pairs[y] for y in bidx]]
#Get the corresponding image as well
batch_data_im = torch.from_numpy(data_im[bidx])
#The lengths for data and labels to be padded to
x_length = max([len(x) for x in batch_data_x])
y_length = max([len(y) for y in batch_data_y])
#Get a list of tokens
batch_x_pad = []
batch_x_lengths = []
batch_y_pad = []
batch_y_lengths = []
        # Update batch_x_lengths and batch_x_pad
for x_tokens in batch_data_x:
x_l = len(x_tokens)
x_pad_seq = pad_seq(x_tokens,x_length)
batch_x_lengths.append(x_l)
batch_x_pad.append(x_pad_seq)
#Reorder the lengths
x_sorted_index = list(np.argsort(batch_x_lengths))
x_reverse_sorted_index = [x for x in reversed(x_sorted_index)]
batch_x_pad_sorted = [batch_x_pad[i] for i in x_reverse_sorted_index]
#Pad data_y and reorder it with respect to the x_reverse_sorted_index
for y_tokens in batch_data_y:
y_l = len(y_tokens)
y_pad_seq = pad_seq(y_tokens,y_length)
batch_y_lengths.append(y_l)
batch_y_pad.append(y_pad_seq)
#Reorder the lengths
batch_y_pad_sorted =[batch_y_pad[i] for i in x_reverse_sorted_index]
batch_y_lengths_sorted = [batch_y_lengths[i] for i in x_reverse_sorted_index]
#Reorder the image numpy matrix with respect to the x_reverse_sorted_index
batch_im_sorted = torch.zeros_like(batch_data_im)
for i,x in enumerate(x_reverse_sorted_index):
batch_im_sorted[i] = batch_data_im[x]
#Generate batch_x and batch_y
batch_x, batch_y = Variable(torch.LongTensor(batch_x_pad_sorted)), Variable(torch.LongTensor(batch_y_pad_sorted))
batch_im = Variable(batch_im_sorted.float())
if use_cuda:
batch_x = batch_x.cuda()
batch_y = batch_y.cuda()
batch_im = batch_im.cuda()
        # Yield the batch data
yield batch_x, batch_y,batch_im, list(reversed(sorted(batch_x_lengths))), batch_y_lengths_sorted
def data_generator_tl_mtv_vi(data_pairs, data_im, batch_size):
"""
    This is an implementation that generates batches in which all target sentences
    share the same length. We borrow the bucket sampler from nmtpytorch to generate
    the batch indices, so that we can fetch the corresponding data pairs.
    Input:
        data_pairs: List of pairs, [[data_1,target_1],[data_2,target_2],...], where data_1 and target_1 are id indexes from 1 to their own vocabulary size. Each instance should end with an EOS_token index.
        batch_size: The size of the batch
        data_im: The numpy matrix which contains the image features. Size: (N,I), N is the number of samples and I is the image feature size
    Output:
        batch_x: Variable with size: B*Lx
        batch_y: Variable with size: B*Ly
        batch_x_lengths: A list which contains the length of each source language sentence in the batch
        batch_y_lengths: A list which contains the length of each target language sentence in the batch
        x_reverse_sorted_index: A list of indices representing the batch sorted by descending instance length.
"""
#Get the lengths of the target language
tl_lengths = [len(x[1]) for x in data_pairs]
#Initialize the index sampler
data_sampler = BucketBatchSampler(tl_lengths, batch_size)
#Iterate through the index sampler
for bidx in data_sampler.__iter__():
batch_data_x = [d[0] for d in [data_pairs[y] for y in bidx]]
batch_data_y = [d[1] for d in [data_pairs[y] for y in bidx]]
batch_data_vi = [d[2] for d in [data_pairs[y] for y in bidx]]
#Get the corresponding image as well
batch_data_im = torch.from_numpy(data_im[bidx])
#The lengths for data and labels to be padded to
x_length = max([len(x) for x in batch_data_x])
y_length = max([len(y) for y in batch_data_y])
vi_length = max([len(vi) for vi in batch_data_vi])
#Get a list of tokens
batch_x_pad = []
batch_x_lengths = []
batch_y_pad = []
batch_y_lengths = []
batch_vi_pad = []
batch_vi_lengths = []
        # Update batch_x_lengths and batch_x_pad
for x_tokens in batch_data_x:
x_l = len(x_tokens)
x_pad_seq = pad_seq(x_tokens, x_length)
batch_x_lengths.append(x_l)
batch_x_pad.append(x_pad_seq)
#Reorder the lengths
x_sorted_index = list(np.argsort(batch_x_lengths))
x_reverse_sorted_index = [x for x in reversed(x_sorted_index)]
batch_x_pad_sorted = [batch_x_pad[i] for i in x_reverse_sorted_index]
#Pad data_y and reorder it with respect to the x_reverse_sorted_index
for y_tokens in batch_data_y:
y_l = len(y_tokens)
y_pad_seq = pad_seq(y_tokens,y_length)
batch_y_lengths.append(y_l)
batch_y_pad.append(y_pad_seq)
#Reorder the lengths
batch_y_pad_sorted =[batch_y_pad[i] for i in x_reverse_sorted_index]
batch_y_lengths_sorted = [batch_y_lengths[i] for i in x_reverse_sorted_index]
#Pad data_vi and reorder it with respect to the x_reverse_sorted_index
for vi in batch_data_vi:
vi_l = len(vi)
vi_pad_seq = pad_seq(vi,vi_length)
batch_vi_lengths.append(vi_l)
batch_vi_pad.append(vi_pad_seq)
#Reorder the lengths
batch_vi_pad_sorted =[batch_vi_pad[i] for i in x_reverse_sorted_index]
batch_vi_lengths_sorted = [batch_vi_lengths[i] for i in x_reverse_sorted_index]
#Reorder the image numpy matrix with respect to the x_reverse_sorted_index
batch_im_sorted = torch.zeros_like(batch_data_im)
for i,x in enumerate(x_reverse_sorted_index):
batch_im_sorted[i] = batch_data_im[x]
#Generate batch_x and batch_y
batch_x, batch_y, batch_vi = Variable(torch.LongTensor(batch_x_pad_sorted)),\
Variable(torch.LongTensor(batch_y_pad_sorted)),\
Variable(torch.FloatTensor(batch_vi_pad_sorted))
batch_im = Variable(batch_im_sorted.float())
if use_cuda:
batch_x = batch_x.cuda()
batch_y = batch_y.cuda()
batch_vi = batch_vi.cuda()
batch_im = batch_im.cuda()
        # Yield the batch data
yield batch_x,batch_y,batch_vi,batch_im,list(reversed(sorted(batch_x_lengths))),batch_y_lengths_sorted
def data_generator_tl_mtv_bta_vi(data_pairs, data_im, data_bta_im, batch_size):
"""
    This is an implementation that generates batches in which all target sentences
    share the same length. We borrow the bucket sampler from nmtpytorch to generate
    the batch indices, so that we can fetch the corresponding data pairs.
    Input:
        data_pairs: List of pairs, [[data_1,target_1],[data_2,target_2],...], where data_1 and target_1 are id indexes from 1 to their own vocabulary size. Each instance should end with an EOS_token index.
        batch_size: The size of the batch
        data_im: The numpy matrix which contains the image features. Size: (N,I), N is the number of samples and I is the image feature size
        data_bta_im: The numpy matrix which contains the additional (bta) image features, aligned with data_im along the first dimension
    Output:
        batch_x: Variable with size: B*Lx
        batch_y: Variable with size: B*Ly
        batch_x_lengths: A list which contains the length of each source language sentence in the batch
        batch_y_lengths: A list which contains the length of each target language sentence in the batch
        x_reverse_sorted_index: A list of indices representing the batch sorted by descending instance length.
"""
#Get the lengths of the target language
tl_lengths = [len(x[1]) for x in data_pairs]
#Initialize the index sampler
data_sampler = BucketBatchSampler(tl_lengths,batch_size)
#Iterate through the index sampler
for bidx in data_sampler.__iter__():
batch_data_x = [d[0] for d in [data_pairs[y] for y in bidx]]
batch_data_y = [d[1] for d in [data_pairs[y] for y in bidx]]
batch_data_vi = [d[2] for d in [data_pairs[y] for y in bidx]]
#Get the corresponding image as well
batch_data_im = torch.from_numpy(data_im[bidx])
batch_data_bta_im = torch.from_numpy(data_bta_im[bidx])
#The lengths for data and labels to be padded to
x_length = max([len(x) for x in batch_data_x])
y_length = max([len(y) for y in batch_data_y])
vi_length = max([len(vi) for vi in batch_data_vi])
#Get a list of tokens
batch_x_pad = []
batch_x_lengths = []
batch_y_pad = []
batch_y_lengths = []
batch_vi_pad = []
batch_vi_lengths = []
        # Update batch_x_lengths and batch_x_pad
for x_tokens in batch_data_x:
x_l = len(x_tokens)
x_pad_seq = pad_seq(x_tokens,x_length)
batch_x_lengths.append(x_l)
batch_x_pad.append(x_pad_seq)
#Reorder the lengths
x_sorted_index = list(np.argsort(batch_x_lengths))
x_reverse_sorted_index = [x for x in reversed(x_sorted_index)]
batch_x_pad_sorted = [batch_x_pad[i] for i in x_reverse_sorted_index]
#Pad data_y and reorder it with respect to the x_reverse_sorted_index
for y_tokens in batch_data_y:
y_l = len(y_tokens)
y_pad_seq = pad_seq(y_tokens,y_length)
batch_y_lengths.append(y_l)
batch_y_pad.append(y_pad_seq)
#Reorder the lengths
batch_y_pad_sorted =[batch_y_pad[i] for i in x_reverse_sorted_index]
batch_y_lengths_sorted = [batch_y_lengths[i] for i in x_reverse_sorted_index]
#Pad data_vi and reorder it with respect to the x_reverse_sorted_index
for vi in batch_data_vi:
vi_l = len(vi)
vi_pad_seq = pad_seq(vi,vi_length)
batch_vi_lengths.append(vi_l)
batch_vi_pad.append(vi_pad_seq)
#Reorder the lengths
batch_vi_pad_sorted =[batch_vi_pad[i] for i in x_reverse_sorted_index]
batch_vi_lengths_sorted = [batch_vi_lengths[i] for i in x_reverse_sorted_index]
#Reorder the image numpy matrix with respect to the x_reverse_sorted_index
batch_im_sorted = torch.zeros_like(batch_data_im)
batch_im_bta_sorted = torch.zeros_like(batch_data_bta_im)
for i,x in enumerate(x_reverse_sorted_index):
batch_im_sorted[i] = batch_data_im[x]
batch_im_bta_sorted[i] = batch_data_bta_im[x]
#Generate batch_x and batch_y
batch_x, batch_y, batch_vi = Variable(torch.LongTensor(batch_x_pad_sorted)),\
Variable(torch.LongTensor(batch_y_pad_sorted)),\
Variable(torch.FloatTensor(batch_vi_pad_sorted))
batch_im = Variable(batch_im_sorted.float())
batch_bta_im = Variable(batch_im_bta_sorted.float())
if use_cuda:
batch_x = batch_x.cuda()
batch_y = batch_y.cuda()
batch_vi = batch_vi.cuda()
batch_im = batch_im.cuda()
batch_bta_im = batch_bta_im.cuda()
        # Yield the batch data
yield batch_x,\
batch_y,\
batch_vi,\
batch_im,\
batch_bta_im,\
list(reversed(sorted(batch_x_lengths))),\
batch_y_lengths_sorted
def data_generator_tl_mtv_imretrieval(data_pairs, data_im, batch_size):
"""
    This is an implementation that generates batches in which all target sentences
    share the same length. We borrow the bucket sampler from nmtpytorch to generate
    the batch indices, so that we can fetch the corresponding data pairs.
    Input:
        data_pairs: List of pairs, [[data_1,target_1],[data_2,target_2],...], where data_1 and target_1 are id indexes from 1 to their own vocabulary size. Each instance should end with an EOS_token index.
        batch_size: The size of the batch
        data_im: The numpy matrix which contains the image features. Size: (N,I), N is the number of samples and I is the image feature size
    Output:
        batch_x: Variable with size: B*Lx
        batch_y: Variable with size: B*Ly
        batch_x_lengths: A list which contains the length of each source language sentence in the batch
        batch_y_lengths: A list which contains the length of each target language sentence in the batch
        x_reverse_sorted_index: A list of indices representing the batch sorted by descending instance length.
"""
#Get the lengths of the target language
tl_lengths = [len(x[1]) for x in data_pairs]
#Initialize the index sampler
data_sampler = BucketBatchSampler(tl_lengths, batch_size)
#Iterate through the index sampler
for bidx in data_sampler.__iter__():
#print(bidx)
batch_data_x = [d[0] for d in [data_pairs[y] for y in bidx]]
batch_data_y = [d[1] for d in [data_pairs[y] for y in bidx]]
#Get the corresponding image as well
batch_data_im = torch.from_numpy(data_im[bidx])
#The lengths for data and labels to be padded to
x_length = max([len(x) for x in batch_data_x])
y_length = max([len(y) for y in batch_data_y])
#Get a list of tokens
batch_x_pad = []
batch_x_lengths = []
batch_y_pad = []
batch_y_lengths = []
        # Update batch_x_lengths and batch_x_pad
for x_tokens in batch_data_x:
x_l = len(x_tokens)
x_pad_seq = pad_seq(x_tokens,x_length)
batch_x_lengths.append(x_l)
batch_x_pad.append(x_pad_seq)
#Reorder the lengths
x_sorted_index = list(np.argsort(batch_x_lengths))
x_reverse_sorted_index = [x for x in reversed(x_sorted_index)]
batch_x_pad_sorted = [batch_x_pad[i] for i in x_reverse_sorted_index]
#Pad data_y and reorder it with respect to the x_reverse_sorted_index
for y_tokens in batch_data_y:
y_l = len(y_tokens)
y_pad_seq = pad_seq(y_tokens,y_length)
batch_y_lengths.append(y_l)
batch_y_pad.append(y_pad_seq)
#Reorder the lengths
batch_y_pad_sorted =[batch_y_pad[i] for i in x_reverse_sorted_index]
batch_y_lengths_sorted = [batch_y_lengths[i] for i in x_reverse_sorted_index]
#Reorder the image numpy matrix with respect to the x_reverse_sorted_index
batch_im_sorted = torch.zeros_like(batch_data_im)
for i,x in enumerate(x_reverse_sorted_index):
batch_im_sorted[i] = batch_data_im[x]
#Generate batch_x and batch_y
batch_x, batch_y = Variable(torch.LongTensor(batch_x_pad_sorted)), Variable(torch.LongTensor(batch_y_pad_sorted))
batch_im = Variable(batch_im_sorted.float())
if use_cuda:
batch_x = batch_x.cuda()
batch_y = batch_y.cuda()
batch_im = batch_im.cuda()
index_retrieval = [bidx[x] for x in x_reverse_sorted_index]
#print(index_retrieval)
        # Yield the batch data
yield batch_x, batch_y, batch_im, list(reversed(sorted(batch_x_lengths))), index_retrieval
def data_generator_tl_mtv_imretrieval_bta(data_pairs, data_im, data_bta_im, batch_size):
"""
    This is an implementation that generates batches in which all target sentences
    share the same length. We borrow the bucket sampler from nmtpytorch to generate
    the batch indices, so that we can fetch the corresponding data pairs.
    Input:
        data_pairs: List of pairs, [[data_1,target_1],[data_2,target_2],...], where data_1 and target_1 are id indexes from 1 to their own vocabulary size. Each instance should end with an EOS_token index.
        batch_size: The size of the batch
        data_im: The numpy matrix which contains the image features. Size: (N,I), N is the number of samples and I is the image feature size
        data_bta_im: The numpy matrix which contains the additional (bta) image features, aligned with data_im along the first dimension
    Output:
        batch_x: Variable with size: B*Lx
        batch_y: Variable with size: B*Ly
        batch_x_lengths: A list which contains the length of each source language sentence in the batch
        batch_y_lengths: A list which contains the length of each target language sentence in the batch
        x_reverse_sorted_index: A list of indices representing the batch sorted by descending instance length.
"""
#Get the lengths of the target language
tl_lengths = [len(x[1]) for x in data_pairs]
#Initialize the index sampler
data_sampler = BucketBatchSampler(tl_lengths,batch_size)
#Iterate through the index sampler
for bidx in data_sampler.__iter__():
batch_data_x = [d[0] for d in [data_pairs[y] for y in bidx]]
batch_data_y = [d[1] for d in [data_pairs[y] for y in bidx]]
#Get the corresponding image as well
batch_data_im = torch.from_numpy(data_im[bidx])
batch_data_bta_im = torch.from_numpy(data_bta_im[bidx])
#The lengths for data and labels to be padded to
x_length = max([len(x) for x in batch_data_x])
y_length = max([len(y) for y in batch_data_y])
#Get a list of tokens
batch_x_pad = []
batch_x_lengths = []
batch_y_pad = []
batch_y_lengths = []
        # Update batch_x_lengths and batch_x_pad
for x_tokens in batch_data_x:
x_l = len(x_tokens)
x_pad_seq = pad_seq(x_tokens,x_length)
batch_x_lengths.append(x_l)
batch_x_pad.append(x_pad_seq)
#Reorder the lengths
x_sorted_index = list(np.argsort(batch_x_lengths))
x_reverse_sorted_index = [x for x in reversed(x_sorted_index)]
batch_x_pad_sorted = [batch_x_pad[i] for i in x_reverse_sorted_index]
#print(x_reverse_sorted_index)
#Pad data_y and reorder it with respect to the x_reverse_sorted_index
for y_tokens in batch_data_y:
y_l = len(y_tokens)
y_pad_seq = pad_seq(y_tokens,y_length)
batch_y_lengths.append(y_l)
batch_y_pad.append(y_pad_seq)
#Reorder the lengths
batch_y_pad_sorted =[batch_y_pad[i] for i in x_reverse_sorted_index]
batch_y_lengths_sorted = [batch_y_lengths[i] for i in x_reverse_sorted_index]
#Reorder the image numpy matrix with respect to the x_reverse_sorted_index
batch_im_sorted = torch.zeros_like(batch_data_im)
batch_bta_im_sorted = torch.zeros_like(batch_data_bta_im)
for i,x in enumerate(x_reverse_sorted_index):
batch_im_sorted[i] = batch_data_im[x]
batch_bta_im_sorted[i] = batch_data_bta_im[x]
#Generate batch_x and batch_y
batch_x, batch_y = Variable(torch.LongTensor(batch_x_pad_sorted)), Variable(torch.LongTensor(batch_y_pad_sorted))
batch_im = Variable(batch_im_sorted.float())
batch_bta_im = Variable(batch_bta_im_sorted.float())
if use_cuda:
batch_x = batch_x.cuda()
batch_y = batch_y.cuda()
batch_im = batch_im.cuda()
batch_bta_im = batch_bta_im.cuda()
index_retrieval = [bidx[x] for x in x_reverse_sorted_index]
        # Yield the batch data
yield batch_x, batch_y, batch_im, batch_bta_im,list(reversed(sorted(batch_x_lengths))), index_retrieval
def translation_reorder(translation, length_sorted_index, id2word):
#Reorder translation
original_translation = [None] * len(translation)
for i,t in zip(length_sorted_index, translation):
original_translation[i] = [id2word.get(x, '<unk>') for x in t]
return original_translation
def translation_reorder_BPE(translation, length_sorted_index, id2word):
#Reorder translation
original_translation = [None] * len(translation)
for i,t in zip(length_sorted_index, translation):
BPE_translation_tokens = [id2word.get(x,'<unk>') for x in t]
        # Process the BPE token sequence back into the original translation
BPE_translation = ' '.join(BPE_translation_tokens)
#Search and Replace patterns
ori_translation = re.sub(r'@@ ',"",BPE_translation)
        # Tokenize the ori_translation and keep it in the original_translation list
original_translation[i] = ori_translation.split()
return original_translation
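# Worked example of the BPE merge above (hypothetical tokens):
#   ['ho@@', 'use', 'is', 'red'] -> "ho@@ use is red" -> "house is red"
# i.e. re.sub(r'@@ ', '', ...) glues subword pieces back into whole words.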
def translation_reorder_ATTN(attns, length_sorted_index):
#Reorder attention
original_attn = np.zeros(attns.shape)
for i,attn in zip(length_sorted_index, attns):
original_attn[i] = attn
return original_attn
| 42.591711 | 215 | 0.656618 | 7,630 | 48,299 | 3.847837 | 0.036697 | 0.041895 | 0.042917 | 0.058244 | 0.876018 | 0.871556 | 0.862053 | 0.854866 | 0.851153 | 0.846384 | 0 | 0.005328 | 0.265575 | 48,299 | 1,133 | 216 | 42.629303 | 0.822339 | 0.295948 | 0 | 0.706154 | 0 | 0 | 0.001143 | 0 | 0 | 0 | 0 | 0 | 0.007692 | 1 | 0.044615 | false | 0 | 0.016923 | 0 | 0.093846 | 0.003077 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f067141a3777f7c3ca9d66289e2111f84391077a | 311 | py | Python | storage/utils/__init__.py | Bibliotecaio/biblioteca | 584268b7615f2be5f011fad09b472ee8a06914e0 | [
"MIT"
] | null | null | null | storage/utils/__init__.py | Bibliotecaio/biblioteca | 584268b7615f2be5f011fad09b472ee8a06914e0 | [
"MIT"
] | null | null | null | storage/utils/__init__.py | Bibliotecaio/biblioteca | 584268b7615f2be5f011fad09b472ee8a06914e0 | [
"MIT"
] | null | null | null | from utils.common import *
from utils.pdftk_wrapper import *
from utils.docsplit_wrapper import *
from utils.sentry_client import sentry_client
from utils.image_helpers import set_watermark, get_image_size
from utils.gm_wrapper import *
from utils.seaweedfs_helpers import *
from utils.core_api_client import *
| 34.555556 | 61 | 0.845659 | 47 | 311 | 5.340426 | 0.404255 | 0.286853 | 0.298805 | 0.262948 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.106109 | 311 | 8 | 62 | 38.875 | 0.902878 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
f069b1f5b0b00feab77654798b92b509770c9b2a | 2,673 | py | Python | tests/batch_generation/test_generate_bad_urls.py | ashton-szabo/api-automation-tools | 279e258623cfe919a4385e63f3badaed66a61561 | [
"MIT"
] | null | null | null | tests/batch_generation/test_generate_bad_urls.py | ashton-szabo/api-automation-tools | 279e258623cfe919a4385e63f3badaed66a61561 | [
"MIT"
] | null | null | null | tests/batch_generation/test_generate_bad_urls.py | ashton-szabo/api-automation-tools | 279e258623cfe919a4385e63f3badaed66a61561 | [
"MIT"
] | 4 | 2022-03-09T06:11:59.000Z | 2022-03-10T02:09:34.000Z | import pytest
import apiautomationtools.batch_generation.batch_generation as bg
pytestmark = pytest.mark.batch_generation
def test_generate_bad_urls_sub_value():
# using the ';' to denote the start of the path arguments.
url = "https://httpbin.org/get;/houseId/1b/2c?param=value1&another_param=2"
bad_urls = bg.generate_bad_urls(url, "0")
expected_urls = [
"https://httpbin.org/get/houseId/0a/2c?param=value1&another_param=2",
"https://httpbin.org/get/houseId/1b/0a?param=value1&another_param=2",
"https://httpbin.org/get/houseId/1b/2c?param=aaaaa0&another_param=2",
"https://httpbin.org/get/houseId/1b/2c?param=value1&another_param=0",
"https://httpbin.org/get/houseId/0a/0a?param=aaaaa0&another_param=0",
]
assert bad_urls == expected_urls
def test_generate_bad_urls_replacements():
url = "https://httpbin.org/get/houseId/1b/2c?param=value1&another_param=2"
bad_urls = bg.generate_bad_urls(url, replacements=["2c", "9f"])
expected_urls = [
"https://httpbin.org/get/houseId/1b/9f?param=value1&another_param=2"
]
assert bad_urls == expected_urls
def test_generate_bad_urls_no_query_params():
url = "https://httpbin.org/get/houseId/1b/2c?param=value1&another_param=2"
bad_urls = bg.generate_bad_urls(url, "0", include_query_params=False)
expected_urls = [
"https://httpbin.org/get/houseId/0a/2c?param=value1&another_param=2",
"https://httpbin.org/get/houseId/1b/0a?param=value1&another_param=2",
"https://httpbin.org/get/houseId/0a/0a?param=value1&another_param=2",
]
assert bad_urls == expected_urls
def test_generate_bad_urls_full():
url = "https://httpbin.org/get/houseId/1b/2c?param1=value1&another_param=2"
bad_urls = bg.generate_bad_urls(url, "0", full=True)
expected_urls = [
"https://httpbin.org/get/houseId/1b/2c?param1=value1&another_param=2",
"https://httpbin.org/aaa/houseId/1b/2c?param1=value1&another_param=2",
"https://httpbin.org/get/aaaaaaa/1b/2c?param1=value1&another_param=2",
"https://httpbin.org/get/houseId/0a/2c?param1=value1&another_param=2",
"https://httpbin.org/get/houseId/1b/0a?param1=value1&another_param=2",
"https://httpbin.org/get/houseId/1b/2c?aaaaa0=value1&another_param=2",
"https://httpbin.org/get/houseId/1b/2c?param1=aaaaa0&another_param=2",
"https://httpbin.org/get/houseId/1b/2c?param1=value1&aaaaaaa_aaaaa=2",
"https://httpbin.org/get/houseId/1b/2c?param1=value1&another_param=0",
"https://httpbin.org/aaa/aaaaaaa/0a/0a?aaaaa0=aaaaa0&aaaaaaa_aaaaa=0",
]
assert bad_urls == expected_urls
| 46.086207 | 79 | 0.707445 | 400 | 2,673 | 4.55 | 0.135 | 0.151648 | 0.18956 | 0.207692 | 0.825824 | 0.813736 | 0.798901 | 0.78956 | 0.74011 | 0.74011 | 0 | 0.046632 | 0.133558 | 2,673 | 57 | 80 | 46.894737 | 0.739206 | 0.02095 | 0 | 0.304348 | 1 | 0.5 | 0.587763 | 0 | 0 | 0 | 0 | 0 | 0.086957 | 1 | 0.086957 | false | 0 | 0.043478 | 0 | 0.130435 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
6512a1bdf6b0b2d41c298e6640b399f5f2ac4a62 | 56,099 | py | Python | vdp/connector/v1alpha/connector_service_pb2_grpc.py | instill-ai/protogen-python | 6e118d34566b8d59e8bcd40e0ae28e0fc1a5d50f | [
"Apache-2.0"
] | 1 | 2022-03-22T09:09:46.000Z | 2022-03-22T09:09:46.000Z | vdp/connector/v1alpha/connector_service_pb2_grpc.py | instill-ai/protogen-python | 6e118d34566b8d59e8bcd40e0ae28e0fc1a5d50f | [
"Apache-2.0"
] | 4 | 2022-03-16T12:36:12.000Z | 2022-03-22T10:53:12.000Z | vdp/connector/v1alpha/connector_service_pb2_grpc.py | instill-ai/protogen-python | 6e118d34566b8d59e8bcd40e0ae28e0fc1a5d50f | [
"Apache-2.0"
] | null | null | null | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from vdp.connector.v1alpha import connector_definition_pb2 as vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2
from vdp.connector.v1alpha import connector_pb2 as vdp_dot_connector_dot_v1alpha_dot_connector__pb2
from vdp.connector.v1alpha import healthcheck_pb2 as vdp_dot_connector_dot_v1alpha_dot_healthcheck__pb2
class ConnectorServiceStub(object):
"""/////////////////////////////////////////////////////////////////////////////
Connector service responds to incoming connector requests.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Liveness = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/Liveness',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_healthcheck__pb2.LivenessRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_healthcheck__pb2.LivenessResponse.FromString,
)
self.Readiness = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/Readiness',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_healthcheck__pb2.ReadinessRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_healthcheck__pb2.ReadinessResponse.FromString,
)
self.ListSourceConnectorDefinition = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/ListSourceConnectorDefinition',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.ListSourceConnectorDefinitionRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.ListSourceConnectorDefinitionResponse.FromString,
)
self.GetSourceConnectorDefinition = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/GetSourceConnectorDefinition',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.GetSourceConnectorDefinitionRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.GetSourceConnectorDefinitionResponse.FromString,
)
self.ListDestinationConnectorDefinition = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/ListDestinationConnectorDefinition',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.ListDestinationConnectorDefinitionRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.ListDestinationConnectorDefinitionResponse.FromString,
)
self.GetDestinationConnectorDefinition = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/GetDestinationConnectorDefinition',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.GetDestinationConnectorDefinitionRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.GetDestinationConnectorDefinitionResponse.FromString,
)
self.CreateSourceConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/CreateSourceConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.CreateSourceConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.CreateSourceConnectorResponse.FromString,
)
self.ListSourceConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/ListSourceConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ListSourceConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ListSourceConnectorResponse.FromString,
)
self.GetSourceConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/GetSourceConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.GetSourceConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.GetSourceConnectorResponse.FromString,
)
self.UpdateSourceConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/UpdateSourceConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.UpdateSourceConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.UpdateSourceConnectorResponse.FromString,
)
self.DeleteSourceConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/DeleteSourceConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DeleteSourceConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DeleteSourceConnectorResponse.FromString,
)
self.LookUpSourceConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/LookUpSourceConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.LookUpSourceConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.LookUpSourceConnectorResponse.FromString,
)
self.ConnectSourceConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/ConnectSourceConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ConnectSourceConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ConnectSourceConnectorResponse.FromString,
)
self.DisconnectSourceConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/DisconnectSourceConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DisconnectSourceConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DisconnectSourceConnectorResponse.FromString,
)
self.RenameSourceConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/RenameSourceConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.RenameSourceConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.RenameSourceConnectorResponse.FromString,
)
self.ReadSourceConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/ReadSourceConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ReadSourceConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ReadSourceConnectorResponse.FromString,
)
self.CreateDestinationConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/CreateDestinationConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.CreateDestinationConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.CreateDestinationConnectorResponse.FromString,
)
self.ListDestinationConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/ListDestinationConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ListDestinationConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ListDestinationConnectorResponse.FromString,
)
self.GetDestinationConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/GetDestinationConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.GetDestinationConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.GetDestinationConnectorResponse.FromString,
)
self.UpdateDestinationConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/UpdateDestinationConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.UpdateDestinationConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.UpdateDestinationConnectorResponse.FromString,
)
self.DeleteDestinationConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/DeleteDestinationConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DeleteDestinationConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DeleteDestinationConnectorResponse.FromString,
)
self.LookUpDestinationConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/LookUpDestinationConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.LookUpDestinationConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.LookUpDestinationConnectorResponse.FromString,
)
self.ConnectDestinationConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/ConnectDestinationConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ConnectDestinationConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ConnectDestinationConnectorResponse.FromString,
)
self.DisconnectDestinationConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/DisconnectDestinationConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DisconnectDestinationConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DisconnectDestinationConnectorResponse.FromString,
)
self.RenameDestinationConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/RenameDestinationConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.RenameDestinationConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.RenameDestinationConnectorResponse.FromString,
)
self.WriteDestinationConnector = channel.unary_unary(
'/vdp.connector.v1alpha.ConnectorService/WriteDestinationConnector',
request_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.WriteDestinationConnectorRequest.SerializeToString,
response_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.WriteDestinationConnectorResponse.FromString,
)
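# Minimal client sketch (illustrative only; the target address is hypothetical):
#   channel = grpc.insecure_channel('localhost:8080')
#   stub = ConnectorServiceStub(channel)
#   resp = stub.Liveness(vdp_dot_connector_dot_v1alpha_dot_healthcheck__pb2.LivenessRequest())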
class ConnectorServiceServicer(object):
"""/////////////////////////////////////////////////////////////////////////////
Connector service responds to incoming connector requests.
"""
def Liveness(self, request, context):
"""///////////////////////////////
Connector definition methods
///////////////////////////////
Liveness method receives a LivenessRequest message and returns a
LivenessResponse message.
See https://github.com/grpc/grpc/blob/master/doc/health-checking.md
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Readiness(self, request, context):
"""Readiness method receives a ReadinessRequest message and returns a
ReadinessResponse message.
See https://github.com/grpc/grpc/blob/master/doc/health-checking.md
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListSourceConnectorDefinition(self, request, context):
"""ListSourceConnectorDefinition method receives a
ListSourceConnectorDefinitionRequest message and returns a
ListSourceConnectorDefinitionResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetSourceConnectorDefinition(self, request, context):
"""GetSourceConnectorDefinition method receives a
GetSourceConnectorDefinitionRequest message and returns a
        GetSourceConnectorDefinitionResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListDestinationConnectorDefinition(self, request, context):
"""ListDestinationConnectorDefinition method receives a
ListDestinationConnectorDefinitionRequest message and returns a
ListDestinationConnectorDefinitionResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetDestinationConnectorDefinition(self, request, context):
"""GetDestinationConnectorDefinition method receives a
GetDestinationConnectorDefinitionRequest message and returns a
        GetDestinationConnectorDefinitionResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateSourceConnector(self, request, context):
"""///////////////////////////////
Connector methods
///////////////////////////////
*SourceConnector methods
CreateSourceConnector method receives a CreateSourceConnectorRequest
message and returns a CreateSourceConnectorResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListSourceConnector(self, request, context):
"""ListSourceConnector method receives a ListSourceConnectorRequest message
and returns a ListSourceConnectorResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetSourceConnector(self, request, context):
"""GetSourceConnector method receives a GetSourceConnectorRequest message and
returns a GetSourceConnectorResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateSourceConnector(self, request, context):
"""UpdateSourceConnector method receives a UpdateSourceConnectorRequest
message and returns a UpdateSourceConnectorResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteSourceConnector(self, request, context):
"""DeleteSourceConnector method receives a DeleteSourceConnectorRequest
message and returns a DeleteSourceConnectorResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def LookUpSourceConnector(self, request, context):
"""LookUpSourceConnector method receives a LookUpSourceConnectorRequest
message and returns a LookUpSourceConnectorResponse
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ConnectSourceConnector(self, request, context):
"""Connect a source connector.
The "state" of the connector after connecting is "CONNECTED".
ConnectSourceConnector can be called on SourceConnector in the state
`DISCONNECTED`; SourceConnector in a different state (including
`CONNECTED`) returns an error.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DisconnectSourceConnector(self, request, context):
"""Disconnect a source connector.
The "state" of the connector after disconnecting is "DISCONNECTED".
DisconnectSourceConnector can be called on SourceConnector in the state
`CONNECTED`; SourceConnector in a different state (including
`DISCONNECTED`) returns an error.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
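    # Illustrative state machine implied by the two methods above:
    #   DISCONNECTED --ConnectSourceConnector-->    CONNECTED
    #   CONNECTED    --DisconnectSourceConnector--> DISCONNECTED
    # Calling either method from any other state returns an error.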
def RenameSourceConnector(self, request, context):
"""RenameSourceConnector method receives a RenameSourceConnectorRequest
message and returns a RenameSourceConnectorResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ReadSourceConnector(self, request, context):
"""ReadSourceConnector method receives a ReadSourceConnectorRequest
message and returns a ReadSourceConnectorResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateDestinationConnector(self, request, context):
"""*DestinationConnector methods
CreateDestinationConnector method receives a
CreateDestinationConnectorRequest message and returns a
CreateDestinationConnectorResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListDestinationConnector(self, request, context):
"""ListDestinationConnector method receives a ListDestinationConnectorRequest
message and returns a ListDestinationConnectorResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetDestinationConnector(self, request, context):
"""GetDestinationConnector method receives a GetDestinationConnectorRequest
message and returns a GetDestinationConnectorResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateDestinationConnector(self, request, context):
"""UpdateDestinationConnector method receives a
UpdateDestinationConnectorRequest message and returns a
UpdateDestinationConnectorResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteDestinationConnector(self, request, context):
"""DeleteDestinationConnector method receives a
DeleteDestinationConnectorRequest message and returns a
DeleteDestinationConnectorResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def LookUpDestinationConnector(self, request, context):
"""LookUpDestinationConnector method receives a
LookUpDestinationConnectorRequest message and returns a
LookUpDestinationConnectorResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ConnectDestinationConnector(self, request, context):
"""Connect a destination connector.
The "state" of the connector after connecting is "CONNECTED".
ConnectDestinationConnector can be called on DestinationConnector in the
state `DISCONNECTED`; DestinationConnector in a different state (including
`CONNECTED`) returns an error.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DisconnectDestinationConnector(self, request, context):
"""Disconnect a destination connector.
The "state" of the connector after disconnecting is "DISCONNECTED".
DisconnectDestinationConnector can be called on DestinationConnector in the
state `CONNECTED`; DestinationConnector in a different state (including
`DISCONNECTED`) returns an error.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RenameDestinationConnector(self, request, context):
"""RenameDestinationConnector method receives a
RenameDestinationConnectorRequest message and returns a
RenameDestinationConnectorResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def WriteDestinationConnector(self, request, context):
"""WriteDestinationConnector method receives a
WriteDestinationConnectorRequest message and returns a
WriteDestinationConnectorResponse message.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_ConnectorServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'Liveness': grpc.unary_unary_rpc_method_handler(
servicer.Liveness,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_healthcheck__pb2.LivenessRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_healthcheck__pb2.LivenessResponse.SerializeToString,
),
'Readiness': grpc.unary_unary_rpc_method_handler(
servicer.Readiness,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_healthcheck__pb2.ReadinessRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_healthcheck__pb2.ReadinessResponse.SerializeToString,
),
'ListSourceConnectorDefinition': grpc.unary_unary_rpc_method_handler(
servicer.ListSourceConnectorDefinition,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.ListSourceConnectorDefinitionRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.ListSourceConnectorDefinitionResponse.SerializeToString,
),
'GetSourceConnectorDefinition': grpc.unary_unary_rpc_method_handler(
servicer.GetSourceConnectorDefinition,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.GetSourceConnectorDefinitionRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.GetSourceConnectorDefinitionResponse.SerializeToString,
),
'ListDestinationConnectorDefinition': grpc.unary_unary_rpc_method_handler(
servicer.ListDestinationConnectorDefinition,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.ListDestinationConnectorDefinitionRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.ListDestinationConnectorDefinitionResponse.SerializeToString,
),
'GetDestinationConnectorDefinition': grpc.unary_unary_rpc_method_handler(
servicer.GetDestinationConnectorDefinition,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.GetDestinationConnectorDefinitionRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.GetDestinationConnectorDefinitionResponse.SerializeToString,
),
'CreateSourceConnector': grpc.unary_unary_rpc_method_handler(
servicer.CreateSourceConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.CreateSourceConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.CreateSourceConnectorResponse.SerializeToString,
),
'ListSourceConnector': grpc.unary_unary_rpc_method_handler(
servicer.ListSourceConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ListSourceConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ListSourceConnectorResponse.SerializeToString,
),
'GetSourceConnector': grpc.unary_unary_rpc_method_handler(
servicer.GetSourceConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.GetSourceConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.GetSourceConnectorResponse.SerializeToString,
),
'UpdateSourceConnector': grpc.unary_unary_rpc_method_handler(
servicer.UpdateSourceConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.UpdateSourceConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.UpdateSourceConnectorResponse.SerializeToString,
),
'DeleteSourceConnector': grpc.unary_unary_rpc_method_handler(
servicer.DeleteSourceConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DeleteSourceConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DeleteSourceConnectorResponse.SerializeToString,
),
'LookUpSourceConnector': grpc.unary_unary_rpc_method_handler(
servicer.LookUpSourceConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.LookUpSourceConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.LookUpSourceConnectorResponse.SerializeToString,
),
'ConnectSourceConnector': grpc.unary_unary_rpc_method_handler(
servicer.ConnectSourceConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ConnectSourceConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ConnectSourceConnectorResponse.SerializeToString,
),
'DisconnectSourceConnector': grpc.unary_unary_rpc_method_handler(
servicer.DisconnectSourceConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DisconnectSourceConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DisconnectSourceConnectorResponse.SerializeToString,
),
'RenameSourceConnector': grpc.unary_unary_rpc_method_handler(
servicer.RenameSourceConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.RenameSourceConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.RenameSourceConnectorResponse.SerializeToString,
),
'ReadSourceConnector': grpc.unary_unary_rpc_method_handler(
servicer.ReadSourceConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ReadSourceConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ReadSourceConnectorResponse.SerializeToString,
),
'CreateDestinationConnector': grpc.unary_unary_rpc_method_handler(
servicer.CreateDestinationConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.CreateDestinationConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.CreateDestinationConnectorResponse.SerializeToString,
),
'ListDestinationConnector': grpc.unary_unary_rpc_method_handler(
servicer.ListDestinationConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ListDestinationConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ListDestinationConnectorResponse.SerializeToString,
),
'GetDestinationConnector': grpc.unary_unary_rpc_method_handler(
servicer.GetDestinationConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.GetDestinationConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.GetDestinationConnectorResponse.SerializeToString,
),
'UpdateDestinationConnector': grpc.unary_unary_rpc_method_handler(
servicer.UpdateDestinationConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.UpdateDestinationConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.UpdateDestinationConnectorResponse.SerializeToString,
),
'DeleteDestinationConnector': grpc.unary_unary_rpc_method_handler(
servicer.DeleteDestinationConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DeleteDestinationConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DeleteDestinationConnectorResponse.SerializeToString,
),
'LookUpDestinationConnector': grpc.unary_unary_rpc_method_handler(
servicer.LookUpDestinationConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.LookUpDestinationConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.LookUpDestinationConnectorResponse.SerializeToString,
),
'ConnectDestinationConnector': grpc.unary_unary_rpc_method_handler(
servicer.ConnectDestinationConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ConnectDestinationConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ConnectDestinationConnectorResponse.SerializeToString,
),
'DisconnectDestinationConnector': grpc.unary_unary_rpc_method_handler(
servicer.DisconnectDestinationConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DisconnectDestinationConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DisconnectDestinationConnectorResponse.SerializeToString,
),
'RenameDestinationConnector': grpc.unary_unary_rpc_method_handler(
servicer.RenameDestinationConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.RenameDestinationConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.RenameDestinationConnectorResponse.SerializeToString,
),
'WriteDestinationConnector': grpc.unary_unary_rpc_method_handler(
servicer.WriteDestinationConnector,
request_deserializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.WriteDestinationConnectorRequest.FromString,
response_serializer=vdp_dot_connector_dot_v1alpha_dot_connector__pb2.WriteDestinationConnectorResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'vdp.connector.v1alpha.ConnectorService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
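# Hedged usage sketch (not part of the generated module): wiring a servicer
# into a gRPC server with the registration helper above. The port is an
# assumption; a real deployment would subclass ConnectorServiceServicer and
# override the methods rather than serve the UNIMPLEMENTED stubs.
def _example_serve():
    from concurrent import futures
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    add_ConnectorServiceServicer_to_server(ConnectorServiceServicer(), server)
    server.add_insecure_port('[::]:8080')
    server.start()
    server.wait_for_termination()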
# This class is part of an EXPERIMENTAL API.
class ConnectorService(object):
"""/////////////////////////////////////////////////////////////////////////////
Connector service responds to incoming connector requests.
"""
@staticmethod
def Liveness(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/Liveness',
vdp_dot_connector_dot_v1alpha_dot_healthcheck__pb2.LivenessRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_healthcheck__pb2.LivenessResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
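# Hedged call sketch (not part of the generated class): invoking the
# experimental helper above directly. The target address and plaintext
# transport are assumptions for illustration.
#
# response = ConnectorService.Liveness(
#     vdp_dot_connector_dot_v1alpha_dot_healthcheck__pb2.LivenessRequest(),
#     target='localhost:8080',
#     insecure=True,
#     timeout=5)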
@staticmethod
def Readiness(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/Readiness',
vdp_dot_connector_dot_v1alpha_dot_healthcheck__pb2.ReadinessRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_healthcheck__pb2.ReadinessResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListSourceConnectorDefinition(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/ListSourceConnectorDefinition',
vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.ListSourceConnectorDefinitionRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.ListSourceConnectorDefinitionResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetSourceConnectorDefinition(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/GetSourceConnectorDefinition',
vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.GetSourceConnectorDefinitionRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.GetSourceConnectorDefinitionResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListDestinationConnectorDefinition(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/ListDestinationConnectorDefinition',
vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.ListDestinationConnectorDefinitionRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.ListDestinationConnectorDefinitionResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetDestinationConnectorDefinition(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/GetDestinationConnectorDefinition',
vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.GetDestinationConnectorDefinitionRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__definition__pb2.GetDestinationConnectorDefinitionResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateSourceConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/CreateSourceConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.CreateSourceConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.CreateSourceConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListSourceConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/ListSourceConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ListSourceConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ListSourceConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetSourceConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/GetSourceConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.GetSourceConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.GetSourceConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateSourceConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/UpdateSourceConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.UpdateSourceConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.UpdateSourceConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteSourceConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/DeleteSourceConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DeleteSourceConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DeleteSourceConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def LookUpSourceConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/LookUpSourceConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.LookUpSourceConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.LookUpSourceConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ConnectSourceConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/ConnectSourceConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ConnectSourceConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ConnectSourceConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DisconnectSourceConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/DisconnectSourceConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DisconnectSourceConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DisconnectSourceConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def RenameSourceConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/RenameSourceConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.RenameSourceConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.RenameSourceConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ReadSourceConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/ReadSourceConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ReadSourceConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ReadSourceConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateDestinationConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/CreateDestinationConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.CreateDestinationConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.CreateDestinationConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListDestinationConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/ListDestinationConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ListDestinationConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ListDestinationConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetDestinationConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/GetDestinationConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.GetDestinationConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.GetDestinationConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateDestinationConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/UpdateDestinationConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.UpdateDestinationConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.UpdateDestinationConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteDestinationConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/DeleteDestinationConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DeleteDestinationConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DeleteDestinationConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def LookUpDestinationConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/LookUpDestinationConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.LookUpDestinationConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.LookUpDestinationConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ConnectDestinationConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/ConnectDestinationConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ConnectDestinationConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.ConnectDestinationConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DisconnectDestinationConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/DisconnectDestinationConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DisconnectDestinationConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.DisconnectDestinationConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def RenameDestinationConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/RenameDestinationConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.RenameDestinationConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.RenameDestinationConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def WriteDestinationConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/vdp.connector.v1alpha.ConnectorService/WriteDestinationConnector',
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.WriteDestinationConnectorRequest.SerializeToString,
vdp_dot_connector_dot_v1alpha_dot_connector__pb2.WriteDestinationConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 56.780364 | 162 | 0.720369 | 4,662 | 56,099 | 8.26834 | 0.043973 | 0.094949 | 0.061873 | 0.074247 | 0.81018 | 0.775884 | 0.771344 | 0.710639 | 0.70366 | 0.663372 | 0 | 0.008579 | 0.216635 | 56,099 | 987 | 163 | 56.837893 | 0.868566 | 0.090287 | 0 | 0.523929 | 1 | 0 | 0.1025 | 0.077184 | 0 | 0 | 0 | 0 | 0 | 1 | 0.06801 | false | 0 | 0.005038 | 0.032746 | 0.109572 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
652afa211f3c0a4aafa37a8aed56c820f2b9e64d | 2,215 | py | Python | irekua_permissions/data_collections/devices.py | CONABIO-audio/irekua-permissions | 563c558e59788054504c852a6a6017bce7469a12 | [
"BSD-4-Clause"
] | null | null | null | irekua_permissions/data_collections/devices.py | CONABIO-audio/irekua-permissions | 563c558e59788054504c852a6a6017bce7469a12 | [
"BSD-4-Clause"
] | 2 | 2020-02-12T03:00:51.000Z | 2020-04-26T23:27:52.000Z | irekua_permissions/data_collections/devices.py | CONABIO-audio/irekua-permissions | 563c558e59788054504c852a6a6017bce7469a12 | [
"BSD-4-Clause"
] | null | null | null | def view(user, collection_device=None, **kwargs):
if collection_device is None:
return False
collection = collection_device.collection
if collection.is_open:
return True
if not user.is_authenticated:
return False
if collection_device.created_by == user:
return True
if user.is_special:
return True
if collection.collection_type.is_admin(user):
return True
if collection.is_admin(user):
return True
if not collection.is_user(user):
return False
role = collection.get_user_role(user)
return role.has_permission('view_collection_devices')
def create(user, collection=None, **kwargs):
if collection is None:
return False
if not user.is_authenticated:
return False
if user.is_superuser:
return True
if collection.collection_type.is_admin(user):
return True
if collection.is_admin(user):
return True
if not collection.is_user(user):
return False
role = collection.get_user_role(user)
return role.has_permission('add_collection_device')
def change(user, collection_device=None, **kwargs):
if collection_device is None:
return False
if not user.is_authenticated:
return False
if collection_device.created_by == user:
return True
if user.is_superuser:
return True
collection = collection_device.collection
if collection.collection_type.is_admin(user):
return True
if collection.is_admin(user):
return True
if not collection.is_user(user):
return False
role = collection.get_user_role(user)
return role.has_permission('change_collection_devices')
def delete(user, collection_device=None, **kwargs):
if collection_device is None:
return False
if not user.is_authenticated:
return False
if collection_device.created_by == user:
return True
if user.is_superuser:
return True
collection = collection_device.collection
if collection.collection_type.is_admin(user):
return True
return collection.is_admin(user)
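# Hedged usage sketch (not part of this module): all four checks share the
# same cascade: open collection, unauthenticated deny, creator, superuser
# (or special user), collection-type admin, collection admin, and finally
# the member role's permission flag. `request.user` and `device` below are
# assumptions for illustration.
#
# from django.core.exceptions import PermissionDenied
# if not change(request.user, collection_device=device):
#     raise PermissionDenied('cannot modify this collection device')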
| 21.930693 | 59 | 0.676749 | 278 | 2,215 | 5.197842 | 0.115108 | 0.124567 | 0.099654 | 0.099654 | 0.845675 | 0.845675 | 0.802076 | 0.802076 | 0.802076 | 0.802076 | 0 | 0 | 0.254628 | 2,215 | 100 | 60 | 22.15 | 0.875227 | 0 | 0 | 0.808824 | 0 | 0 | 0.032506 | 0.031151 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058824 | false | 0 | 0 | 0 | 0.5 | 0.014706 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
65376aa9c4eb404c6abc84953e6521f99521ffda | 2,453 | py | Python | irrigator_pro/farms/migrations/0024_rain_and_irrigation_allow_null.py | warnes/irrigatorpro | 4838f8832bdbf87f394a0298adc5dabfc26e82e8 | [
"MIT"
] | null | null | null | irrigator_pro/farms/migrations/0024_rain_and_irrigation_allow_null.py | warnes/irrigatorpro | 4838f8832bdbf87f394a0298adc5dabfc26e82e8 | [
"MIT"
] | null | null | null | irrigator_pro/farms/migrations/0024_rain_and_irrigation_allow_null.py | warnes/irrigatorpro | 4838f8832bdbf87f394a0298adc5dabfc26e82e8 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from decimal import Decimal
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('farms', '0023_reorder_source_choices'),
]
operations = [
migrations.AlterField(
model_name='probereading',
name='irrigation',
field=models.DecimalField(decimal_places=2, default=0.0, validators=[django.core.validators.MinValueValidator(Decimal('0'))], max_digits=4, blank=True, null=True, verbose_name=b'Irrigation in inches'),
preserve_default=True,
),
migrations.AlterField(
model_name='probereading',
name='rain',
field=models.DecimalField(decimal_places=2, default=0.0, validators=[django.core.validators.MinValueValidator(Decimal('0'))], max_digits=4, blank=True, null=True, verbose_name=b'Rainfall in inches'),
preserve_default=True,
),
migrations.AlterField(
model_name='waterhistory',
name='irrigation',
field=models.DecimalField(decimal_places=2, default=0.0, validators=[django.core.validators.MinValueValidator(Decimal('0'))], max_digits=4, blank=True, null=True, verbose_name=b'Irrigation in inches'),
preserve_default=True,
),
migrations.AlterField(
model_name='waterhistory',
name='rain',
field=models.DecimalField(decimal_places=2, default=0.0, validators=[django.core.validators.MinValueValidator(Decimal('0'))], max_digits=4, blank=True, null=True, verbose_name=b'Rainfall in inches'),
preserve_default=True,
),
migrations.AlterField(
model_name='waterregister',
name='irrigation',
field=models.DecimalField(decimal_places=2, default=0.0, validators=[django.core.validators.MinValueValidator(Decimal('0'))], max_digits=4, blank=True, null=True, verbose_name=b'Irrigation in inches'),
preserve_default=True,
),
migrations.AlterField(
model_name='waterregister',
name='rain',
field=models.DecimalField(decimal_places=2, default=0.0, validators=[django.core.validators.MinValueValidator(Decimal('0'))], max_digits=4, blank=True, null=True, verbose_name=b'Rainfall in inches'),
preserve_default=True,
),
]
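# Hedged note (not part of the generated migration): applying it uses the
# standard Django management command, e.g.
#
#     python manage.py migrate farms 0024
#
# after which the rain and irrigation columns on ProbeReading, WaterHistory
# and WaterRegister accept NULL in addition to the 0.00 default.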
| 46.283019 | 213 | 0.659193 | 267 | 2,453 | 5.913858 | 0.191011 | 0.044332 | 0.088664 | 0.110196 | 0.867638 | 0.867638 | 0.834072 | 0.834072 | 0.834072 | 0.834072 | 0 | 0.018191 | 0.215654 | 2,453 | 52 | 214 | 47.173077 | 0.802495 | 0.008561 | 0 | 0.782609 | 0 | 0 | 0.110288 | 0.011111 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.086957 | 0 | 0.152174 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
65393ab2a02eb5c6d7c8d16531a7095536099af9 | 6,171 | py | Python | tests/unit/types/test_input_object.py | erezsh/tartiflette | c945b02e9025e2524393c1eaec2191745bfc38f4 | [
"MIT"
] | null | null | null | tests/unit/types/test_input_object.py | erezsh/tartiflette | c945b02e9025e2524393c1eaec2191745bfc38f4 | [
"MIT"
] | null | null | null | tests/unit/types/test_input_object.py | erezsh/tartiflette | c945b02e9025e2524393c1eaec2191745bfc38f4 | [
"MIT"
] | null | null | null | from collections import OrderedDict
from tartiflette.types.argument import GraphQLArgument
from tartiflette.types.input_object import GraphQLInputObjectType
def test_graphql_input_object_init():
input_object = GraphQLInputObjectType(name="Name",
fields=OrderedDict([
("test", GraphQLArgument(name="arg", gql_type="Int")),
("another", GraphQLArgument(name="arg", gql_type="String")),
]),
description="description")
assert input_object.name == "Name"
assert input_object.fields == OrderedDict([
("test", GraphQLArgument(name="arg", gql_type="Int")),
("another", GraphQLArgument(name="arg", gql_type="String")),
])
assert input_object.description == "description"
def test_graphql_input_object_repr():
input_object = GraphQLInputObjectType(name="Name",
fields=OrderedDict([
("test",
GraphQLArgument(name="arg",
gql_type="Int")),
("another",
GraphQLArgument(name="arg",
gql_type="String")),
]),
description="description")
assert input_object.__repr__() == "GraphQLInputObjectType(name='Name', " \
"fields=OrderedDict([" \
"('test', GraphQLArgument(name='arg', gql_type='Int', default_value=None, description=None)), " \
"('another', GraphQLArgument(name='arg', gql_type='String', default_value=None, description=None))" \
"]), description='description')"
assert input_object == eval(repr(input_object))
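# The eval(repr(...)) round-trip above only holds because __repr__ emits
# valid constructor syntax; if the repr format drifts from the constructor
# signature, this assertion is the test that catches it.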
def test_graphql_input_object_eq():
input_object = GraphQLInputObjectType(name="Name",
fields=OrderedDict([
("test",
GraphQLArgument(name="arg",
gql_type="Int")),
("another",
GraphQLArgument(name="arg",
gql_type="String")),
]),
description="description")
## Same
assert input_object == input_object
assert input_object == GraphQLInputObjectType(name="Name",
fields=OrderedDict([
("test",
GraphQLArgument(name="arg",
gql_type="Int")),
("another",
GraphQLArgument(name="arg",
gql_type="String")),
]),
description="description")
# Currently we ignore the description when comparing
assert input_object == GraphQLInputObjectType(name="Name",
fields=OrderedDict([
("test",
GraphQLArgument(name="arg",
gql_type="Int")),
("another",
GraphQLArgument(name="arg",
gql_type="String")),
]))
## Different
assert input_object != GraphQLInputObjectType(name="Name",
fields=OrderedDict([
("another",
GraphQLArgument(
name="arg",
gql_type="String")),
("test",
GraphQLArgument(
name="arg",
gql_type="Int")),
# We reversed the order of arguments
]))
assert input_object != GraphQLInputObjectType(name="Name",
fields=OrderedDict())
assert input_object != GraphQLInputObjectType(name="OtherName",
fields=OrderedDict([
("another",
GraphQLArgument(
name="arg",
gql_type="String")),
("test",
GraphQLArgument(
name="arg",
gql_type="Int")),
# We reversed the order of arguments
]))
| 58.771429 | 139 | 0.323124 | 299 | 6,171 | 6.491639 | 0.147157 | 0.113344 | 0.204019 | 0.231839 | 0.8305 | 0.717671 | 0.717671 | 0.696033 | 0.638846 | 0.638846 | 0 | 0 | 0.600551 | 6,171 | 105 | 140 | 58.771429 | 0.787424 | 0.021552 | 0 | 0.784091 | 0 | 0.022727 | 0.09617 | 0.019068 | 0 | 0 | 0 | 0 | 0.125 | 1 | 0.034091 | false | 0 | 0.034091 | 0 | 0.068182 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
65413312a3663eada278f869a01fad644903a210 | 190,453 | py | Python | sdks/python/appcenter_sdk/api/test_api.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | null | null | null | sdks/python/appcenter_sdk/api/test_api.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | 6 | 2019-10-23T06:38:53.000Z | 2022-01-22T07:57:58.000Z | sdks/python/appcenter_sdk/api/test_api.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | 2 | 2019-10-23T06:31:05.000Z | 2021-08-21T17:32:47.000Z | # coding: utf-8
"""
App Center Client
Microsoft Visual Studio App Center API # noqa: E501
OpenAPI spec version: preview
Contact: benedetto.abbenanti@gmail.com
Project Repository: https://github.com/b3nab/appcenter-sdks
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from appcenter_sdk.api_client import ApiClient
class testApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def test_getDeviceSetOfUser(self, id, owner_name, app_name, **kwargs): # noqa: E501
"""test_getDeviceSetOfUser # noqa: E501
Gets a device set belonging to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getDeviceSetOfUser(id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string id: The UUID of the device set (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: DeviceSet
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_getDeviceSetOfUser_with_http_info(id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_getDeviceSetOfUser_with_http_info(id, owner_name, app_name, **kwargs) # noqa: E501
return data
def test_getDeviceSetOfUser_with_http_info(self, id, owner_name, app_name, **kwargs): # noqa: E501
"""test_getDeviceSetOfUser # noqa: E501
Gets a device set belonging to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getDeviceSetOfUser_with_http_info(id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string id: The UUID of the device set (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: DeviceSet
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_getDeviceSetOfUser" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `test_getDeviceSetOfUser`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_getDeviceSetOfUser`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_getDeviceSetOfUser`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/user/device_sets/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceSet', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
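# Hedged usage sketch (not part of the generated client): the same endpoint
# called synchronously and asynchronously. ApiClient configuration and auth
# setup are assumptions; see the SDK's configuration module.
#
# api = testApi(ApiClient())
# device_set = api.test_getDeviceSetOfUser(id, owner_name, app_name)
# thread = api.test_getDeviceSetOfUser(id, owner_name, app_name, async=True)
# device_set = thread.get()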
def test_updateDeviceSetOfUser(self, id, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_updateDeviceSetOfUser # noqa: E501
Updates a device set belonging to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_updateDeviceSetOfUser(id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string id: The UUID of the device set (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: TestCloudErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_updateDeviceSetOfUser_with_http_info(id, owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.test_updateDeviceSetOfUser_with_http_info(id, owner_name, app_name, body, **kwargs) # noqa: E501
return data
def test_updateDeviceSetOfUser_with_http_info(self, id, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_updateDeviceSetOfUser # noqa: E501
Updates a device set belonging to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_updateDeviceSetOfUser_with_http_info(id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string id: The UUID of the device set (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: TestCloudErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_updateDeviceSetOfUser" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `test_updateDeviceSetOfUser`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_updateDeviceSetOfUser`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_updateDeviceSetOfUser`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `test_updateDeviceSetOfUser`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/user/device_sets/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestCloudErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_deleteDeviceSetOfUser(self, id, owner_name, app_name, **kwargs): # noqa: E501
"""test_deleteDeviceSetOfUser # noqa: E501
Deletes a device set belonging to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_deleteDeviceSetOfUser(id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string id: The UUID of the device set (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_deleteDeviceSetOfUser_with_http_info(id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_deleteDeviceSetOfUser_with_http_info(id, owner_name, app_name, **kwargs) # noqa: E501
return data
def test_deleteDeviceSetOfUser_with_http_info(self, id, owner_name, app_name, **kwargs): # noqa: E501
"""test_deleteDeviceSetOfUser # noqa: E501
Deletes a device set belonging to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_deleteDeviceSetOfUser_with_http_info(id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string id: The UUID of the device set (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_deleteDeviceSetOfUser" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `test_deleteDeviceSetOfUser`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_deleteDeviceSetOfUser`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_deleteDeviceSetOfUser`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'text/csv', 'text/plain']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/user/device_sets/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_listDeviceSetsOfUser(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_listDeviceSetsOfUser # noqa: E501
Lists device sets belonging to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_listDeviceSetsOfUser(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_listDeviceSetsOfUser_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_listDeviceSetsOfUser_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def test_listDeviceSetsOfUser_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_listDeviceSetsOfUser # noqa: E501
Lists device sets belonging to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_listDeviceSetsOfUser_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_listDeviceSetsOfUser" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_listDeviceSetsOfUser`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_listDeviceSetsOfUser`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/user/device_sets', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='array', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
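# NOTE (editorial): every method in this class follows the same sync/async
# convention visible above: calling it directly blocks and returns the
# deserialized data, while passing async=True returns a thread-like result
# whose .get() yields the same value. Because `async` became a reserved
# keyword in Python 3.7, this generated module only runs on Python 2.7/3.6
# and earlier; later generator releases renamed the flag to `async_req`.
# A minimal usage sketch (commented out; `TestApi` and a configured
# `api_client` are assumptions about the surrounding package, not defined
# in this file):
#
#   api = TestApi(api_client)                                  # hypothetical
#   sets = api.test_listDeviceSetsOfUser('me', 'my-app')       # synchronous
#   thread = api.test_listDeviceSetsOfUser('me', 'my-app', async=True)
#   sets = thread.get()                          # join the worker thread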
def test_createDeviceSetOfUser(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_createDeviceSetOfUser # noqa: E501
Creates a device set belonging to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_createDeviceSetOfUser(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: TestCloudErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_createDeviceSetOfUser_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.test_createDeviceSetOfUser_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
return data
def test_createDeviceSetOfUser_with_http_info(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_createDeviceSetOfUser # noqa: E501
Creates a device set belonging to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_createDeviceSetOfUser_with_http_info(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: TestCloudErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_createDeviceSetOfUser" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_createDeviceSetOfUser`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_createDeviceSetOfUser`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `test_createDeviceSetOfUser`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/user/device_sets', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestCloudErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
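# Illustrative request body for test_createDeviceSetOfUser (commented out;
# the spec only exposes an opaque `object body`, so the field names below
# are assumptions based on the endpoint's purpose, not the real contract):
#
#   body = {
#       "name": "smoke-devices",                 # hypothetical field
#       "devices": ["apple_iphone_8-11.4"],      # hypothetical device slugs
#   }
#   api.test_createDeviceSetOfUser('me', 'my-app', body)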
def test_getAllTestRunsForSeries(self, test_series_slug, owner_name, app_name, **kwargs): # noqa: E501
"""test_getAllTestRunsForSeries # noqa: E501
Returns a list of all test runs for a given test series # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getAllTestRunsForSeries(test_series_slug, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_series_slug: The slug of the test series (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_getAllTestRunsForSeries_with_http_info(test_series_slug, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_getAllTestRunsForSeries_with_http_info(test_series_slug, owner_name, app_name, **kwargs) # noqa: E501
return data
def test_getAllTestRunsForSeries_with_http_info(self, test_series_slug, owner_name, app_name, **kwargs): # noqa: E501
"""test_getAllTestRunsForSeries # noqa: E501
Returns a list of all test runs for a given test series # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getAllTestRunsForSeries_with_http_info(test_series_slug, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_series_slug: The slug of the test series (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['test_series_slug', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_getAllTestRunsForSeries" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'test_series_slug' is set
if ('test_series_slug' not in params or
params['test_series_slug'] is None):
raise ValueError("Missing the required parameter `test_series_slug` when calling `test_getAllTestRunsForSeries`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_getAllTestRunsForSeries`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_getAllTestRunsForSeries`") # noqa: E501
collection_formats = {}
path_params = {}
if 'test_series_slug' in params:
path_params['test_series_slug'] = params['test_series_slug'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_series/{test_series_slug}/test_runs', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='array', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_deleteTestSeries(self, test_series_slug, owner_name, app_name, **kwargs): # noqa: E501
"""test_deleteTestSeries # noqa: E501
Deletes a single test series # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_deleteTestSeries(test_series_slug, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_series_slug: The slug of the test series (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_deleteTestSeries_with_http_info(test_series_slug, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_deleteTestSeries_with_http_info(test_series_slug, owner_name, app_name, **kwargs) # noqa: E501
return data
def test_deleteTestSeries_with_http_info(self, test_series_slug, owner_name, app_name, **kwargs): # noqa: E501
"""test_deleteTestSeries # noqa: E501
Deletes a single test series # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_deleteTestSeries_with_http_info(test_series_slug, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_series_slug: The slug of the test series (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['test_series_slug', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_deleteTestSeries" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'test_series_slug' is set
if ('test_series_slug' not in params or
params['test_series_slug'] is None):
raise ValueError("Missing the required parameter `test_series_slug` when calling `test_deleteTestSeries`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_deleteTestSeries`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_deleteTestSeries`") # noqa: E501
collection_formats = {}
path_params = {}
if 'test_series_slug' in params:
path_params['test_series_slug'] = params['test_series_slug'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'text/csv', 'text/plain']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_series/{test_series_slug}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_patchTestSeries(self, test_series_slug, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_patchTestSeries # noqa: E501
Updates the name and slug of a test series # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_patchTestSeries(test_series_slug, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string test_series_slug: The slug of the test series (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: TestSeries
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_patchTestSeries_with_http_info(test_series_slug, owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.test_patchTestSeries_with_http_info(test_series_slug, owner_name, app_name, body, **kwargs) # noqa: E501
return data
def test_patchTestSeries_with_http_info(self, test_series_slug, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_patchTestSeries # noqa: E501
Updates the name and slug of a test series # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_patchTestSeries_with_http_info(test_series_slug, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string test_series_slug: The slug of the test series (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: TestSeries
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['test_series_slug', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_patchTestSeries" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'test_series_slug' is set
if ('test_series_slug' not in params or
params['test_series_slug'] is None):
raise ValueError("Missing the required parameter `test_series_slug` when calling `test_patchTestSeries`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_patchTestSeries`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_patchTestSeries`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `test_patchTestSeries`") # noqa: E501
collection_formats = {}
path_params = {}
if 'test_series_slug' in params:
path_params['test_series_slug'] = params['test_series_slug'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_series/{test_series_slug}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestSeries', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
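# Illustrative PATCH payload for test_patchTestSeries (commented out; the
# docstring only says the endpoint updates the name and slug, so this shape
# is an assumption rather than the documented schema):
#
#   body = {"name": "Nightly", "slug": "nightly"}   # hypothetical fields
#   series = api.test_patchTestSeries('master', 'me', 'my-app', body)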
def test_getAllTestSeries(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_getAllTestSeries # noqa: E501
Returns a list of all test series for an application # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getAllTestSeries(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string query: A query string to filter test series (optional)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_getAllTestSeries_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_getAllTestSeries_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def test_getAllTestSeries_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_getAllTestSeries # noqa: E501
Returns a list of all test series for an application # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getAllTestSeries_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string query: A query string to filter test series (optional)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'query'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_getAllTestSeries" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_getAllTestSeries`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_getAllTestSeries`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'query' in params:
query_params.append(('query', params['query'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_series', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='array', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
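# The optional `query` kwarg above is forwarded as a query-string parameter;
# any other keyword raises TypeError via the all_params check. Sketch
# (commented out; owner/app names are placeholders):
#
#   all_series = api.test_getAllTestSeries('me', 'my-app')
#   filtered   = api.test_getAllTestSeries('me', 'my-app', query='nightly')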
def test_createTestSeries(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_createTestSeries # noqa: E501
Creates a new test series for an application # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_createTestSeries(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: TestCloudErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_createTestSeries_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.test_createTestSeries_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
return data
def test_createTestSeries_with_http_info(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_createTestSeries # noqa: E501
Creates a new test series for an application # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_createTestSeries_with_http_info(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: TestCloudErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_createTestSeries" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_createTestSeries`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_createTestSeries`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `test_createTestSeries`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_series', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestCloudErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_stopTestRun(self, test_run_id, owner_name, app_name, **kwargs): # noqa: E501
"""test_stopTestRun # noqa: E501
Stops a test run execution # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_stopTestRun(test_run_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run to be stopped (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestRun
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_stopTestRun_with_http_info(test_run_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_stopTestRun_with_http_info(test_run_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def test_stopTestRun_with_http_info(self, test_run_id, owner_name, app_name, **kwargs): # noqa: E501
"""test_stopTestRun # noqa: E501
Stops a test run execution # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_stopTestRun_with_http_info(test_run_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run to be stopped (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestRun
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['test_run_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_stopTestRun" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'test_run_id' is set
if ('test_run_id' not in params or
params['test_run_id'] is None):
raise ValueError("Missing the required parameter `test_run_id` when calling `test_stopTestRun`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_stopTestRun`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_stopTestRun`") # noqa: E501
collection_formats = {}
path_params = {}
if 'test_run_id' in params:
path_params['test_run_id'] = params['test_run_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_runs/{test_run_id}/stop', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestRun', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_getTestRunState(self, test_run_id, owner_name, app_name, **kwargs): # noqa: E501
"""test_getTestRunState # noqa: E501
Gets the state of the test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getTestRunState(test_run_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestRunState
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_getTestRunState_with_http_info(test_run_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_getTestRunState_with_http_info(test_run_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def test_getTestRunState_with_http_info(self, test_run_id, owner_name, app_name, **kwargs): # noqa: E501
"""test_getTestRunState # noqa: E501
Gets the state of the test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getTestRunState_with_http_info(test_run_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestRunState
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['test_run_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_getTestRunState" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'test_run_id' is set
if ('test_run_id' not in params or
params['test_run_id'] is None):
raise ValueError("Missing the required parameter `test_run_id` when calling `test_getTestRunState`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_getTestRunState`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_getTestRunState`") # noqa: E501
collection_formats = {}
path_params = {}
if 'test_run_id' in params:
path_params['test_run_id'] = params['test_run_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_runs/{test_run_id}/state', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestRunState', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
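# A common pattern is to poll test_getTestRunState until the run finishes
# (commented out; the attributes read from the returned TestRunState model,
# such as a status field or a suggested wait interval, are assumptions about
# that model rather than guarantees made in this file):
#
#   import time
#   state = api.test_getTestRunState(run_id, 'me', 'my-app')
#   while getattr(state, 'status', None) not in ('finished', 'aborted'):
#       time.sleep(getattr(state, 'wait_time', 30) or 30)
#       state = api.test_getTestRunState(run_id, 'me', 'my-app')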
def test_startTestRun(self, test_run_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_startTestRun # noqa: E501
Starts a test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_startTestRun(test_run_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: Options required to start the test run (required)
:return: TestCloudStartTestRunResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_startTestRun_with_http_info(test_run_id, owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.test_startTestRun_with_http_info(test_run_id, owner_name, app_name, body, **kwargs) # noqa: E501
return data
def test_startTestRun_with_http_info(self, test_run_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_startTestRun # noqa: E501
Starts a test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_startTestRun_with_http_info(test_run_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: Options required to start the test run (required)
:return: TestCloudStartTestRunResult
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['test_run_id', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_startTestRun" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'test_run_id' is set
if ('test_run_id' not in params or
params['test_run_id'] is None):
raise ValueError("Missing the required parameter `test_run_id` when calling `test_startTestRun`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_startTestRun`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_startTestRun`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `test_startTestRun`") # noqa: E501
collection_formats = {}
path_params = {}
if 'test_run_id' in params:
path_params['test_run_id'] = params['test_run_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_runs/{test_run_id}/start', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestCloudStartTestRunResult', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
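# Illustrative start payload for test_startTestRun (commented out; the spec
# only exposes an opaque `object body`, so these fields are assumptions
# drawn from the method's purpose, not the documented schema):
#
#   body = {
#       "test_framework": "appium",            # hypothetical field
#       "device_selection": "smoke-devices",   # hypothetical field
#   }
#   result = api.test_startTestRun(run_id, 'me', 'my-app', body)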
def test_getTestReport(self, test_run_id, owner_name, app_name, **kwargs): # noqa: E501
"""test_getTestReport # noqa: E501
Returns a single test report # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getTestReport(test_run_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestReport
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_getTestReport_with_http_info(test_run_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_getTestReport_with_http_info(test_run_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def test_getTestReport_with_http_info(self, test_run_id, owner_name, app_name, **kwargs): # noqa: E501
"""test_getTestReport # noqa: E501
Returns a single test report # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getTestReport_with_http_info(test_run_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestReport
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['test_run_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_getTestReport" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'test_run_id' is set
if ('test_run_id' not in params or
params['test_run_id'] is None):
raise ValueError("Missing the required parameter `test_run_id` when calling `test_getTestReport`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_getTestReport`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_getTestReport`") # noqa: E501
collection_formats = {}
path_params = {}
if 'test_run_id' in params:
path_params['test_run_id'] = params['test_run_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_runs/{test_run_id}/report', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestReport', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_uploadHashesBatch(self, test_run_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_uploadHashesBatch # noqa: E501
Adds files with the given hashes to a test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_uploadHashesBatch(test_run_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param array body: File hash information (required)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_uploadHashesBatch_with_http_info(test_run_id, owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.test_uploadHashesBatch_with_http_info(test_run_id, owner_name, app_name, body, **kwargs) # noqa: E501
return data
def test_uploadHashesBatch_with_http_info(self, test_run_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_uploadHashesBatch # noqa: E501
Adds files with the given hashes to a test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_uploadHashesBatch_with_http_info(test_run_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param array body: File hash information (required)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['test_run_id', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_uploadHashesBatch" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'test_run_id' is set
if ('test_run_id' not in params or
params['test_run_id'] is None):
raise ValueError("Missing the required parameter `test_run_id` when calling `test_uploadHashesBatch`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_uploadHashesBatch`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_uploadHashesBatch`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `test_uploadHashesBatch`") # noqa: E501
collection_formats = {}
path_params = {}
if 'test_run_id' in params:
path_params['test_run_id'] = params['test_run_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_runs/{test_run_id}/hashes/batch', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='array', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
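# Illustrative batch payload for test_uploadHashesBatch (commented out; the
# spec types the body as a bare array, so the per-file shape below is an
# assumption, purely for illustration):
#
#   body = [
#       {"file_type": "test-file",             # hypothetical field
#        "checksum": "2f8a8c3f...",            # hypothetical file hash
#        "relative_path": "tests/smoke.rb"},   # hypothetical field
#   ]
#   accepted = api.test_uploadHashesBatch(run_id, 'me', 'my-app', body)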
def test_uploadHash(self, test_run_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_uploadHash # noqa: E501
Adds a file with the given hash to a test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_uploadHash(test_run_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: File hash information (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_uploadHash_with_http_info(test_run_id, owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.test_uploadHash_with_http_info(test_run_id, owner_name, app_name, body, **kwargs) # noqa: E501
return data
def test_uploadHash_with_http_info(self, test_run_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_uploadHash # noqa: E501
Adds a file with the given hash to a test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_uploadHash_with_http_info(test_run_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: File hash information (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['test_run_id', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_uploadHash" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'test_run_id' is set
if ('test_run_id' not in params or
params['test_run_id'] is None):
raise ValueError("Missing the required parameter `test_run_id` when calling `test_uploadHash`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_uploadHash`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_uploadHash`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `test_uploadHash`") # noqa: E501
collection_formats = {}
path_params = {}
if 'test_run_id' in params:
path_params['test_run_id'] = params['test_run_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'text/csv', 'text/plain']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_runs/{test_run_id}/hashes', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_startUploadingFile(self, test_run_id, owner_name, app_name, **kwargs): # noqa: E501
"""test_startUploadingFile # noqa: E501
Uploads a file for a test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_startUploadingFile(test_run_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_startUploadingFile_with_http_info(test_run_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_startUploadingFile_with_http_info(test_run_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def test_startUploadingFile_with_http_info(self, test_run_id, owner_name, app_name, **kwargs): # noqa: E501
"""test_startUploadingFile # noqa: E501
Uploads a file for a test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_startUploadingFile_with_http_info(test_run_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['test_run_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_startUploadingFile" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'test_run_id' is set
if ('test_run_id' not in params or
params['test_run_id'] is None):
raise ValueError("Missing the required parameter `test_run_id` when calling `test_startUploadingFile`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_startUploadingFile`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_startUploadingFile`") # noqa: E501
collection_formats = {}
path_params = {}
if 'test_run_id' in params:
path_params['test_run_id'] = params['test_run_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'text/csv', 'text/plain']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_runs/{test_run_id}/files', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_getTestRun(self, test_run_id, owner_name, app_name, **kwargs): # noqa: E501
"""test_getTestRun # noqa: E501
Returns a single test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getTestRun(test_run_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestRun
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_getTestRun_with_http_info(test_run_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_getTestRun_with_http_info(test_run_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def test_getTestRun_with_http_info(self, test_run_id, owner_name, app_name, **kwargs): # noqa: E501
"""test_getTestRun # noqa: E501
Returns a single test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getTestRun_with_http_info(test_run_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestRun
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['test_run_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_getTestRun" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'test_run_id' is set
if ('test_run_id' not in params or
params['test_run_id'] is None):
raise ValueError("Missing the required parameter `test_run_id` when calling `test_getTestRun`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_getTestRun`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_getTestRun`") # noqa: E501
collection_formats = {}
path_params = {}
if 'test_run_id' in params:
path_params['test_run_id'] = params['test_run_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_runs/{test_run_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestRun', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
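# Async sketch (assuming Python <= 3.6, where `async` is still a legal
# keyword argument): with async=True, call_api hands the request to a
# thread pool and returns the worker, so the body is fetched via .get():
#
#     thread = api.test_getTestRun(run_id, org, app, async=True)
#     test_run = thread.get()        # blocks until the GET resolves
#
# Calling the _with_http_info variant directly (without the forced
# `_return_http_data_only`) yields `(data, status_code, headers)` instead.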
def test_archiveTestRun(self, test_run_id, owner_name, app_name, **kwargs): # noqa: E501
"""test_archiveTestRun # noqa: E501
Logically deletes a test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_archiveTestRun(test_run_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestRun
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_archiveTestRun_with_http_info(test_run_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_archiveTestRun_with_http_info(test_run_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def test_archiveTestRun_with_http_info(self, test_run_id, owner_name, app_name, **kwargs): # noqa: E501
"""test_archiveTestRun # noqa: E501
Logically deletes a test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_archiveTestRun_with_http_info(test_run_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string test_run_id: The ID of the test run (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestRun
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['test_run_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_archiveTestRun" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'test_run_id' is set
if ('test_run_id' not in params or
params['test_run_id'] is None):
raise ValueError("Missing the required parameter `test_run_id` when calling `test_archiveTestRun`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_archiveTestRun`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_archiveTestRun`") # noqa: E501
collection_formats = {}
path_params = {}
if 'test_run_id' in params:
path_params['test_run_id'] = params['test_run_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_runs/{test_run_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestRun', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
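# Note: this DELETE is the logical delete described in the docstring, and
# the route above still declares response_type='TestRun', so the archived
# run is deserialized and returned rather than an empty body:
#
#     archived = api.test_archiveTestRun(run_id, org, app)
#     # `archived` is a TestRun model instance for the soft-deleted run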
def test_getTestRuns(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_getTestRuns # noqa: E501
Returns a list of test runs # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getTestRuns(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_getTestRuns_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_getTestRuns_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def test_getTestRuns_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_getTestRuns # noqa: E501
Returns a list of test runs # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getTestRuns_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_getTestRuns" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_getTestRuns`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_getTestRuns`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_runs', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='array', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
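# The leading-underscore kwargs accepted by every method here are passed
# straight through to call_api. A hedged sketch of the common ones:
#
#     runs, status, headers = api.test_getTestRuns_with_http_info(
#         org, app,
#         _request_timeout=(3.05, 30),   # (connect, read) seconds, or a single float
#         _preload_content=True)         # deserialize instead of returning raw urllib3 data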
def test_createTestRun(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_createTestRun # noqa: E501
Creates a new test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_createTestRun(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_createTestRun_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_createTestRun_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def test_createTestRun_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_createTestRun # noqa: E501
Creates a new test run # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_createTestRun_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_createTestRun" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_createTestRun`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_createTestRun`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'text/csv', 'text/plain']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test_runs', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_gdprExportTestRun(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_gdprExportTestRun # noqa: E501
Lists test run data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportTestRun(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestGDPRTestRun
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_gdprExportTestRun_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_gdprExportTestRun_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def test_gdprExportTestRun_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_gdprExportTestRun # noqa: E501
Lists test run data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportTestRun_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestGDPRTestRun
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_gdprExportTestRun" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_gdprExportTestRun`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_gdprExportTestRun`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test/export/testRuns', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestGDPRTestRun', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_gdprExportPipelineTest(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_gdprExportPipelineTest # noqa: E501
Lists pipeline test data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportPipelineTest(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestGDPRPipelineTest
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_gdprExportPipelineTest_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_gdprExportPipelineTest_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def test_gdprExportPipelineTest_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_gdprExportPipelineTest # noqa: E501
Lists pipeline test data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportPipelineTest_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestGDPRPipelineTest
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_gdprExportPipelineTest" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_gdprExportPipelineTest`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_gdprExportPipelineTest`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test/export/pipelineTests', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestGDPRPipelineTest', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_gdprExportHashFile(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_gdprExportHashFile # noqa: E501
Lists hash file data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportHashFile(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestGDPRHashFile
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_gdprExportHashFile_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_gdprExportHashFile_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def test_gdprExportHashFile_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_gdprExportHashFile # noqa: E501
Lists hash file data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportHashFile_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestGDPRHashFile
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_gdprExportHashFile" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_gdprExportHashFile`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_gdprExportHashFile`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test/export/hashFiles', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestGDPRHashFile', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_gdprExportFileSetFile(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_gdprExportFileSetFile # noqa: E501
Lists file set file data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportFileSetFile(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestGDPRFileSetFile
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_gdprExportFileSetFile_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_gdprExportFileSetFile_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def test_gdprExportFileSetFile_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_gdprExportFileSetFile # noqa: E501
Lists file set file data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportFileSetFile_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestGDPRFileSetFile
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_gdprExportFileSetFile" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_gdprExportFileSetFile`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_gdprExportFileSetFile`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test/export/fileSetFiles', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestGDPRFileSetFile', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_gdprExportApp(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_gdprExportApp # noqa: E501
Lists all the endpoints available for Test app data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportApp(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestGDPRResourceList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_gdprExportApp_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_gdprExportApp_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def test_gdprExportApp_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_gdprExportApp # noqa: E501
Lists all the endpoints available for Test app data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportApp_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: TestGDPRResourceList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_gdprExportApp" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_gdprExportApp`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_gdprExportApp`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/test/export', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestGDPRResourceList', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
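# The five test/export routes above (testRuns, pipelineTests, hashFiles,
# fileSetFiles, and this resource list) form the GDPR export surface for
# an app's Test data. A sketch of walking the listing; the `resources`
# attribute is an assumption based on the TestGDPRResourceList model name,
# not confirmed by this file:
#
#     listing = api.test_gdprExportApp(org, app)
#     for resource in listing.resources:   # hypothetical attribute
#         print(resource)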
def test_getSubscriptions(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_getSubscriptions # noqa: E501
Get information about the currently active subscriptions, if any # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getSubscriptions(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: Subscription
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_getSubscriptions_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_getSubscriptions_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def test_getSubscriptions_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_getSubscriptions # noqa: E501
Get information about the currently active subscriptions, if any # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getSubscriptions_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: Subscription
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_getSubscriptions" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_getSubscriptions`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_getSubscriptions`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/subscriptions', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Subscription', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_createSubscription(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_createSubscription # noqa: E501
Accept a free trial subscription # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_createSubscription(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: Subscription
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_createSubscription_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_createSubscription_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def test_createSubscription_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_createSubscription # noqa: E501
Accept a free trial subscription # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_createSubscription_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: Subscription
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_createSubscription" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_createSubscription`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_createSubscription`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/subscriptions', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Subscription', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_getDeviceSetOfOwner(self, id, owner_name, app_name, **kwargs): # noqa: E501
"""test_getDeviceSetOfOwner # noqa: E501
Gets a device set belonging to the owner # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getDeviceSetOfOwner(id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string id: The UUID of the device set (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: DeviceSet
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_getDeviceSetOfOwner_with_http_info(id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_getDeviceSetOfOwner_with_http_info(id, owner_name, app_name, **kwargs) # noqa: E501
return data
def test_getDeviceSetOfOwner_with_http_info(self, id, owner_name, app_name, **kwargs): # noqa: E501
"""test_getDeviceSetOfOwner # noqa: E501
Gets a device set belonging to the owner # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getDeviceSetOfOwner_with_http_info(id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string id: The UUID of the device set (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: DeviceSet
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_getDeviceSetOfOwner" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `test_getDeviceSetOfOwner`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_getDeviceSetOfOwner`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_getDeviceSetOfOwner`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/owner/device_sets/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceSet', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_updateDeviceSetOfOwner(self, id, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_updateDeviceSetOfOwner # noqa: E501
Updates a device set belonging to the owner # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_updateDeviceSetOfOwner(id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string id: The UUID of the device set (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: TestCloudErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_updateDeviceSetOfOwner_with_http_info(id, owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.test_updateDeviceSetOfOwner_with_http_info(id, owner_name, app_name, body, **kwargs) # noqa: E501
return data
def test_updateDeviceSetOfOwner_with_http_info(self, id, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_updateDeviceSetOfOwner # noqa: E501
Updates a device set belonging to the owner # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_updateDeviceSetOfOwner_with_http_info(id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string id: The UUID of the device set (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: TestCloudErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_updateDeviceSetOfOwner" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `test_updateDeviceSetOfOwner`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_updateDeviceSetOfOwner`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_updateDeviceSetOfOwner`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `test_updateDeviceSetOfOwner`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/owner/device_sets/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestCloudErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
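# For the device-set PUT/POST endpoints the required `body` argument is
# typed as a bare `object` and serialized as the JSON request body
# (Content-Type application/json above). A sketch with assumed field
# names -- the schema is not defined in this file:
#
#     body = {'name': 'nightly-phones',
#             'devices': ['device-descriptor-1', 'device-descriptor-2']}
#     result = api.test_updateDeviceSetOfOwner(set_id, org, app, body)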
def test_deleteDeviceSetOfOwner(self, id, owner_name, app_name, **kwargs): # noqa: E501
"""test_deleteDeviceSetOfOwner # noqa: E501
Deletes a device set belonging to the owner # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_deleteDeviceSetOfOwner(id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string id: The UUID of the device set (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_deleteDeviceSetOfOwner_with_http_info(id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_deleteDeviceSetOfOwner_with_http_info(id, owner_name, app_name, **kwargs) # noqa: E501
return data
def test_deleteDeviceSetOfOwner_with_http_info(self, id, owner_name, app_name, **kwargs): # noqa: E501
"""test_deleteDeviceSetOfOwner # noqa: E501
Deletes a device set belonging to the owner # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_deleteDeviceSetOfOwner_with_http_info(id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string id: The UUID of the device set (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_deleteDeviceSetOfOwner" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `test_deleteDeviceSetOfOwner`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_deleteDeviceSetOfOwner`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_deleteDeviceSetOfOwner`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'text/csv', 'text/plain']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/owner/device_sets/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
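# Failure modes, as implemented above: a required argument that is None
# raises ValueError and an unrecognized keyword raises TypeError, both
# before any request is sent; HTTP-level errors surface from call_api
# itself, in swagger-codegen clients typically as an ApiException:
#
#     try:
#         api.test_deleteDeviceSetOfOwner(set_id, org, app)
#     except ValueError:
#         pass   # a required path parameter was missing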
def test_listDeviceSetsOfOwner(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_listDeviceSetsOfOwner # noqa: E501
Lists device sets belonging to the owner # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_listDeviceSetsOfOwner(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_listDeviceSetsOfOwner_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_listDeviceSetsOfOwner_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def test_listDeviceSetsOfOwner_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_listDeviceSetsOfOwner # noqa: E501
Lists device sets belonging to the owner # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_listDeviceSetsOfOwner_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_listDeviceSetsOfOwner" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_listDeviceSetsOfOwner`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_listDeviceSetsOfOwner`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/owner/device_sets', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='array', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_createDeviceSetOfOwner(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_createDeviceSetOfOwner # noqa: E501
Creates a device set belonging to the owner # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_createDeviceSetOfOwner(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: TestCloudErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_createDeviceSetOfOwner_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.test_createDeviceSetOfOwner_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
return data
def test_createDeviceSetOfOwner_with_http_info(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_createDeviceSetOfOwner # noqa: E501
Creates a device set belonging to the owner # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_createDeviceSetOfOwner_with_http_info(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: TestCloudErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_createDeviceSetOfOwner" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_createDeviceSetOfOwner`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_createDeviceSetOfOwner`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `test_createDeviceSetOfOwner`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/owner/device_sets', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestCloudErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_createDeviceSelection(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_createDeviceSelection # noqa: E501
Creates a short ID for a list of devices # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_createDeviceSelection(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: TestCloudErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_createDeviceSelection_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.test_createDeviceSelection_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
return data
def test_createDeviceSelection_with_http_info(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""test_createDeviceSelection # noqa: E501
Creates a short ID for a list of devices # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_createDeviceSelection_with_http_info(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (required)
:return: TestCloudErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_createDeviceSelection" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_createDeviceSelection`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_createDeviceSelection`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `test_createDeviceSelection`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/device_selection', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestCloudErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_getDeviceConfigurations(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_getDeviceConfigurations # noqa: E501
Returns a list of available devices # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getDeviceConfigurations(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string app_upload_id: The ID of the test run (optional)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_getDeviceConfigurations_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.test_getDeviceConfigurations_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def test_getDeviceConfigurations_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""test_getDeviceConfigurations # noqa: E501
Returns a list of available devices # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_getDeviceConfigurations_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string app_upload_id: The ID of the test run (optional)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'app_upload_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_getDeviceConfigurations" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `test_getDeviceConfigurations`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `test_getDeviceConfigurations`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'app_upload_id' in params:
query_params.append(('app_upload_id', params['app_upload_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/device_configurations', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='array', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_gdprExportUser(self, **kwargs): # noqa: E501
"""test_gdprExportUser # noqa: E501
Lists user data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportUser(async=True)
>>> result = thread.get()
:param async bool
:return: TestGDPRUser
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_gdprExportUser_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.test_gdprExportUser_with_http_info(**kwargs) # noqa: E501
return data
def test_gdprExportUser_with_http_info(self, **kwargs): # noqa: E501
"""test_gdprExportUser # noqa: E501
Lists user data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportUser_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:return: TestGDPRUser
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_gdprExportUser" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/account/test/export/users', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestGDPRUser', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_gdprExportFeatureFlag(self, **kwargs): # noqa: E501
"""test_gdprExportFeatureFlag # noqa: E501
Lists feature flag data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportFeatureFlag(async=True)
>>> result = thread.get()
:param async bool
:return: TestGDPRFeatureFlag
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_gdprExportFeatureFlag_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.test_gdprExportFeatureFlag_with_http_info(**kwargs) # noqa: E501
return data
def test_gdprExportFeatureFlag_with_http_info(self, **kwargs): # noqa: E501
"""test_gdprExportFeatureFlag # noqa: E501
Lists feature flag data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportFeatureFlag_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:return: TestGDPRFeatureFlag
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_gdprExportFeatureFlag" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/account/test/export/featureFlags', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestGDPRFeatureFlag', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_gdprExportAccount(self, **kwargs): # noqa: E501
"""test_gdprExportAccount # noqa: E501
Lists account data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportAccount(async=True)
>>> result = thread.get()
:param async bool
:return: TestGDPRAccount
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_gdprExportAccount_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.test_gdprExportAccount_with_http_info(**kwargs) # noqa: E501
return data
def test_gdprExportAccount_with_http_info(self, **kwargs): # noqa: E501
"""test_gdprExportAccount # noqa: E501
Lists account data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExportAccount_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:return: TestGDPRAccount
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_gdprExportAccount" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/account/test/export/accounts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestGDPRAccount', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_gdprExport(self, **kwargs): # noqa: E501
"""test_gdprExport # noqa: E501
Lists all the endpoints available for Test account data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExport(async=True)
>>> result = thread.get()
:param async bool
:return: TestGDPRResourceList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.test_gdprExport_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.test_gdprExport_with_http_info(**kwargs) # noqa: E501
return data
def test_gdprExport_with_http_info(self, **kwargs): # noqa: E501
"""test_gdprExport # noqa: E501
Lists all the endpoints available for Test account data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.test_gdprExport_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:return: TestGDPRResourceList
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_gdprExport" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/account/test/export', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TestGDPRResourceList', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
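# Illustrative usage sketch (added; not generated code). The instance name
# `api` and its construction are assumptions -- only the call convention is
# taken from the methods above:
#
#   thread = api.test_gdprExport(async=True)   # async call returns a thread
#   resource_list = thread.get()               # block for TestGDPRResourceList
#   resource_list = api.test_gdprExport()      # synchronous call returns data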
# ----------------------------------------------------------------------------
# File: test/test_stream.py -- repo: LRSEngineering/ws4py (BSD-3-Clause)
# ----------------------------------------------------------------------------
# -*- coding: utf-8 -*-
import unittest
import os
import struct
from ws4py.framing import Frame, \
OPCODE_CONTINUATION, OPCODE_TEXT, \
OPCODE_BINARY, OPCODE_CLOSE, OPCODE_PING, OPCODE_PONG
from ws4py.streaming import Stream
from ws4py.messaging import TextMessage, BinaryMessage, \
CloseControlMessage, PingControlMessage, PongControlMessage
from ws4py.compat import *
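# Note on the test pattern below (added comment): Stream.parser is a primed
# generator; the tests feed it raw frame bytes with s.parser.send(...) and,
# where needed, call next(s.parser) to resume it, then inspect the messages,
# errors, pings/pongs, or closing state accumulated on the Stream.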
class WSStreamTest(unittest.TestCase):
def test_empty_close_message(self):
f = Frame(opcode=OPCODE_CLOSE, body=b'', fin=1, masking_key=os.urandom(4)).build()
s = Stream()
self.assertEqual(s.closing, None)
s.parser.send(f)
self.assertEqual(type(s.closing), CloseControlMessage)
def test_missing_masking_key_when_expected(self):
f = Frame(opcode=OPCODE_TEXT, body=b'hello', fin=1, masking_key=None).build()
s = Stream(expect_masking=True)
s.parser.send(f)
next(s.parser)
self.assertNotEqual(s.errors, [])
self.assertIsInstance(s.errors[0], CloseControlMessage)
self.assertEqual(s.errors[0].code, 1002)
def test_using_masking_key_when_unexpected(self):
f = Frame(opcode=OPCODE_TEXT, body=b'hello', fin=1, masking_key=os.urandom(4)).build()
s = Stream(expect_masking=False)
s.parser.send(f)
next(s.parser)
self.assertNotEqual(s.errors, [])
self.assertIsInstance(s.errors[0], CloseControlMessage)
self.assertEqual(s.errors[0].code, 1002)
def test_text_messages_cannot_interleave(self):
s = Stream()
f = Frame(opcode=OPCODE_TEXT, body=b'hello',
fin=0, masking_key=os.urandom(4)).build()
s.parser.send(f)
next(s.parser)
f = Frame(opcode=OPCODE_TEXT, body=b'there',
fin=1, masking_key=os.urandom(4)).build()
s.parser.send(f)
next(s.parser)
self.assertNotEqual(s.errors, [])
self.assertIsInstance(s.errors[0], CloseControlMessage)
self.assertEqual(s.errors[0].code, 1002)
def test_binary_messages_cannot_interleave(self):
s = Stream()
f = Frame(opcode=OPCODE_BINARY, body=os.urandom(2),
fin=0, masking_key=os.urandom(4)).build()
s.parser.send(f)
next(s.parser)
f = Frame(opcode=OPCODE_BINARY, body=os.urandom(7),
fin=1, masking_key=os.urandom(4)).build()
s.parser.send(f)
next(s.parser)
self.assertNotEqual(s.errors, [])
self.assertIsInstance(s.errors[0], CloseControlMessage)
self.assertEqual(s.errors[0].code, 1002)
def test_binary_and_text_messages_cannot_interleave(self):
s = Stream()
f = Frame(opcode=OPCODE_TEXT, body=b'hello',
fin=0, masking_key=os.urandom(4)).build()
s.parser.send(f)
next(s.parser)
f = Frame(opcode=OPCODE_BINARY, body=os.urandom(7),
fin=1, masking_key=os.urandom(4)).build()
s.parser.send(f)
next(s.parser)
self.assertNotEqual(s.errors, [])
self.assertIsInstance(s.errors[0], CloseControlMessage)
self.assertEqual(s.errors[0].code, 1002)
def test_continuation_frame_before_message_started_is_invalid(self):
f = Frame(opcode=OPCODE_CONTINUATION, body=b'hello',
fin=1, masking_key=os.urandom(4)).build()
s = Stream()
s.parser.send(f)
next(s.parser)
self.assertNotEqual(s.errors, [])
self.assertIsInstance(s.errors[0], CloseControlMessage)
self.assertEqual(s.errors[0].code, 1002)
def test_invalid_encoded_bytes(self):
f = Frame(opcode=OPCODE_TEXT, body=b'h\xc3llo',
fin=1, masking_key=os.urandom(4)).build()
s = Stream()
s.parser.send(f)
next(s.parser)
self.assertNotEqual(s.errors, [])
self.assertIsInstance(s.errors[0], CloseControlMessage)
self.assertEqual(s.errors[0].code, 1007)
def test_invalid_encoded_bytes_on_continuation(self):
s = Stream()
f = Frame(opcode=OPCODE_TEXT, body=b'hello',
fin=0, masking_key=os.urandom(4)).build()
s.parser.send(f)
next(s.parser)
f = Frame(opcode=OPCODE_CONTINUATION, body=b'h\xc3llo',
fin=1, masking_key=os.urandom(4)).build()
s.parser.send(f)
next(s.parser)
self.assertNotEqual(s.errors, [])
self.assertIsInstance(s.errors[0], CloseControlMessage)
self.assertEqual(s.errors[0].code, 1007)
def test_too_large_close_message(self):
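# A close frame payload is a 2-byte big-endian status code optionally
# followed by a UTF-8 reason; RFC 6455 caps control-frame payloads at 125
# bytes, so the 332-byte body below must be rejected with code 1002.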
payload = struct.pack("!H", 1000) + b'*' * 330
f = Frame(opcode=OPCODE_CLOSE, body=payload,
fin=1, masking_key=os.urandom(4)).build()
s = Stream()
self.assertEqual(len(s.errors), 0)
self.assertEqual(s.closing, None)
s.parser.send(f)
self.assertEqual(s.closing, None)
self.assertEqual(len(s.errors), 1)
self.assertEqual(type(s.errors[0]), CloseControlMessage)
self.assertEqual(s.errors[0].code, 1002)
def test_invalid_sized_close_message(self):
payload = b'boom'
f = Frame(opcode=OPCODE_CLOSE, body=payload,
fin=1, masking_key=os.urandom(4)).build()
s = Stream()
self.assertEqual(len(s.errors), 0)
self.assertEqual(s.closing, None)
s.parser.send(f)
self.assertEqual(type(s.closing), CloseControlMessage)
self.assertEqual(s.closing.code, 1002)
def test_close_message_of_size_one_are_invalid(self):
payload = b'*'
f = Frame(opcode=OPCODE_CLOSE, body=payload,
fin=1, masking_key=os.urandom(4)).build()
s = Stream()
self.assertEqual(len(s.errors), 0)
self.assertEqual(s.closing, None)
s.parser.send(f)
self.assertEqual(type(s.closing), CloseControlMessage)
self.assertEqual(s.closing.code, 1002)
def test_invalid_close_message_type(self):
payload = struct.pack("!H", 1500) + b'hello'
f = Frame(opcode=OPCODE_CLOSE, body=payload,
fin=1, masking_key=os.urandom(4)).build()
s = Stream()
self.assertEqual(len(s.errors), 0)
self.assertEqual(s.closing, None)
s.parser.send(f)
self.assertEqual(type(s.closing), CloseControlMessage)
self.assertEqual(s.closing.code, 1002)
def test_invalid_close_message_reason_encoding(self):
payload = struct.pack("!H", 1000) + b'h\xc3llo'
f = Frame(opcode=OPCODE_CLOSE, body=payload,
fin=1, masking_key=os.urandom(4)).build()
s = Stream()
self.assertEqual(len(s.errors), 0)
self.assertEqual(s.closing, None)
s.parser.send(f)
self.assertEqual(s.closing, None)
self.assertEqual(type(s.errors[0]), CloseControlMessage)
self.assertEqual(s.errors[0].code, 1007)
def test_protocol_exception_from_frame_parsing(self):
payload = struct.pack("!H", 1000) + b'hello'
f = Frame(opcode=OPCODE_CLOSE, body=payload,
fin=1, masking_key=os.urandom(4))
f.rsv1 = 1
f = f.build()
s = Stream()
self.assertEqual(len(s.errors), 0)
self.assertEqual(s.closing, None)
s.parser.send(f)
self.assertEqual(s.closing, None)
self.assertEqual(type(s.errors[0]), CloseControlMessage)
self.assertEqual(s.errors[0].code, 1002)
def test_close_message_received(self):
payload = struct.pack("!H", 1000) + b'hello'
f = Frame(opcode=OPCODE_CLOSE, body=payload,
fin=1, masking_key=os.urandom(4)).build()
s = Stream()
self.assertEqual(s.closing, None)
s.parser.send(f)
self.assertEqual(type(s.closing), CloseControlMessage)
self.assertEqual(s.closing.code, 1000)
self.assertEqual(s.closing.reason, b'hello')
def test_ping_message_received(self):
msg = b'ping me'
f = Frame(opcode=OPCODE_PING, body=msg, fin=1, masking_key=os.urandom(4)).build()
s = Stream()
self.assertEqual(len(s.pings), 0)
s.parser.send(f)
self.assertEqual(len(s.pings), 1)
def test_pong_message_received(self):
msg = b'pong!'
f = Frame(opcode=OPCODE_PONG, body=msg, fin=1, masking_key=os.urandom(4)).build()
s = Stream()
self.assertEqual(len(s.pongs), 0)
s.parser.send(f)
self.assertEqual(len(s.pongs), 1)
def test_text_message_received(self):
msg = b'hello there'
f = Frame(opcode=OPCODE_TEXT, body=msg, fin=1, masking_key=os.urandom(4)).build()
s = Stream()
self.assertEqual(len(s.messages), 0)
s.parser.send(f)
self.assertEqual(len(s.messages), 1)
def test_incremental_text_message_received(self):
msg = b'hello there'
f = Frame(opcode=OPCODE_TEXT, body=msg, fin=1, masking_key=os.urandom(4)).build()
s = Stream()
self.assertEqual(s.has_message, False)
data = f
for index in range(len(data)):
s.parser.send(data[index:index+1])
self.assertEqual(s.has_message, True)
def test_text_message_completed(self):
msg = b'hello there'
f = Frame(opcode=OPCODE_TEXT, body=msg, fin=1, masking_key=os.urandom(4)).build()
s = Stream()
self.assertEqual(s.has_message, False)
s.parser.send(f)
self.assertEqual(s.message.completed, True)
def test_text_message_with_continuation_received(self):
msg = b'hello there'
f = Frame(opcode=OPCODE_TEXT, body=msg, fin=0, masking_key=os.urandom(4)).build()
s = Stream()
self.assertEqual(s.has_message, False)
s.parser.send(f)
self.assertEqual(s.message.completed, False)
for i in range(3):
f = Frame(opcode=OPCODE_CONTINUATION, body=msg, fin=0, masking_key=os.urandom(4)).build()
s.parser.send(f)
self.assertEqual(s.has_message, False)
self.assertEqual(s.message.completed, False)
self.assertEqual(s.message.opcode, OPCODE_TEXT)
f = Frame(opcode=OPCODE_CONTINUATION, body=msg, fin=1, masking_key=os.urandom(4)).build()
s.parser.send(f)
self.assertEqual(s.has_message, True)
self.assertEqual(s.message.completed, True)
self.assertEqual(s.message.opcode, OPCODE_TEXT)
def test_text_message_with_continuation_and_ping_in_between(self):
msg = b'hello there'
f = Frame(opcode=OPCODE_TEXT, body=msg, fin=0, masking_key=os.urandom(4)).build()
s = Stream()
self.assertEqual(s.has_message, False)
s.parser.send(f)
self.assertEqual(s.message.completed, False)
for i in range(3):
f = Frame(opcode=OPCODE_CONTINUATION, body=msg, fin=0, masking_key=os.urandom(4)).build()
s.parser.send(f)
self.assertEqual(s.has_message, False)
self.assertEqual(s.message.completed, False)
self.assertEqual(s.message.opcode, OPCODE_TEXT)
f = Frame(opcode=OPCODE_PING, body=b'ping me', fin=1, masking_key=os.urandom(4)).build()
self.assertEqual(len(s.pings), i)
s.parser.send(f)
self.assertEqual(len(s.pings), i+1)
f = Frame(opcode=OPCODE_CONTINUATION, body=msg, fin=1, masking_key=os.urandom(4)).build()
s.parser.send(f)
self.assertEqual(s.has_message, True)
self.assertEqual(s.message.opcode, OPCODE_TEXT)
self.assertEqual(s.message.completed, True)
def test_binary_message_received(self):
msg = os.urandom(16)
f = Frame(opcode=OPCODE_BINARY, body=msg, fin=1, masking_key=os.urandom(4)).build()
s = Stream()
self.assertEqual(s.has_message, False)
s.parser.send(f)
self.assertEqual(s.message.completed, True)
def test_binary_message_with_continuation_received(self):
msg = os.urandom(16)
key = os.urandom(4)
f = Frame(opcode=OPCODE_BINARY, body=msg, fin=0, masking_key=key).build()
s = Stream()
self.assertEqual(s.has_message, False)
s.parser.send(f)
self.assertEqual(s.has_message, False)
for i in range(3):
f = Frame(opcode=OPCODE_CONTINUATION, body=msg, fin=0, masking_key=key).build()
s.parser.send(f)
self.assertEqual(s.has_message, False)
self.assertEqual(s.message.completed, False)
self.assertEqual(s.message.opcode, OPCODE_BINARY)
f = Frame(opcode=OPCODE_CONTINUATION, body=msg, fin=1, masking_key=key).build()
s.parser.send(f)
self.assertEqual(s.has_message, True)
self.assertEqual(s.message.completed, True)
self.assertEqual(s.message.opcode, OPCODE_BINARY)
def test_helper_with_unicode_text_message(self):
s = Stream()
m = s.text_message(u'hello there!')
self.assertIsInstance(m, TextMessage)
self.assertFalse(m.is_binary)
self.assertTrue(m.is_text)
self.assertEqual(m.opcode, OPCODE_TEXT)
self.assertEqual(m.encoding, 'utf-8')
self.assertIsInstance(m.data, bytes)
self.assertEqual(len(m), 12)
self.assertEqual(len(m.data), 12)
self.assertEqual(m.data, b'hello there!')
def test_helper_with_bytes_text_message(self):
s = Stream()
m = s.text_message('hello there!')
self.assertIsInstance(m, TextMessage)
self.assertFalse(m.is_binary)
self.assertTrue(m.is_text)
self.assertEqual(m.opcode, OPCODE_TEXT)
self.assertEqual(m.encoding, 'utf-8')
self.assertIsInstance(m.data, bytes)
self.assertEqual(len(m), 12)
self.assertEqual(len(m.data), 12)
self.assertEqual(m.data, b'hello there!')
def test_helper_with_binary_message(self):
msg = os.urandom(16)
s = Stream()
m = s.binary_message(msg)
self.assertIsInstance(m, BinaryMessage)
self.assertTrue(m.is_binary)
self.assertFalse(m.is_text)
self.assertEqual(m.opcode, OPCODE_BINARY)
self.assertIsInstance(m.data, bytes)
self.assertEqual(len(m), 16)
self.assertEqual(len(m.data), 16)
self.assertEqual(m.data, msg)
def test_helper_ping_message(self):
s = Stream()
m = s.ping('sos')
self.assertIsInstance(m, bytes)
self.assertEqual(len(m), 5)
def test_helper_masked_ping_message(self):
s = Stream(always_mask=True)
m = s.ping('sos')
self.assertIsInstance(m, bytes)
self.assertEqual(len(m), 9)
def test_helper_pong_message(self):
s = Stream()
m = s.pong('sos')
self.assertIsInstance(m, bytes)
self.assertEqual(len(m), 5)
def test_helper_masked_pong_message(self):
s = Stream(always_mask=True)
m = s.pong('sos')
self.assertIsInstance(m, bytes)
self.assertEqual(len(m), 9)
def test_closing_parser_should_release_resources(self):
f = Frame(opcode=OPCODE_TEXT, body=b'hello',
fin=1, masking_key=os.urandom(4)).build()
s = Stream()
s.parser.send(f)
s.parser.close()
if __name__ == '__main__':
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for testcase in [WSStreamTest]:
tests = loader.loadTestsFromTestCase(testcase)
suite.addTests(tests)
unittest.TextTestRunner(verbosity=2).run(suite)
# ----------------------------------------------------------------------------
# File: py3canvas/tests/pages.py -- repo: tylerclair/py3canvas (MIT)
# ----------------------------------------------------------------------------
"""Pages API Tests for Version 1.0.
This is a testing template for the generated PagesAPI Class.
"""
import unittest
import requests
import secrets
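# NOTE (added): `secrets` here is expected to be a local credentials module
# shipped with the test suite (providing instance_address and access_token),
# not the standard-library `secrets` module, which has no such attributes.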
from py3canvas.apis.pages import PagesAPI
from py3canvas.apis.pages import Page
from py3canvas.apis.pages import Pagerevision
class TestPagesAPI(unittest.TestCase):
"""Tests for the PagesAPI."""
def setUp(self):
self.client = PagesAPI(secrets.instance_address, secrets.access_token)
def test_show_front_page_courses(self):
"""Integration test for the PagesAPI.show_front_page_courses method."""
course_id = None # Change me!!
r = self.client.show_front_page_courses(course_id)
def test_show_front_page_groups(self):
"""Integration test for the PagesAPI.show_front_page_groups method."""
group_id = None # Change me!!
r = self.client.show_front_page_groups(group_id)
def test_duplicate_page(self):
"""Integration test for the PagesAPI.duplicate_page method."""
# This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
pass
def test_update_create_front_page_courses(self):
"""Integration test for the PagesAPI.update_create_front_page_courses method."""
# This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration.
pass
def test_update_create_front_page_groups(self):
"""Integration test for the PagesAPI.update_create_front_page_groups method."""
# This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration.
pass
def test_list_pages_courses(self):
"""Integration test for the PagesAPI.list_pages_courses method."""
course_id = None # Change me!!
r = self.client.list_pages_courses(
course_id, order=None, published=None, search_term=None, sort=None
)
def test_list_pages_groups(self):
"""Integration test for the PagesAPI.list_pages_groups method."""
group_id = None # Change me!!
r = self.client.list_pages_groups(
group_id, order=None, published=None, search_term=None, sort=None
)
def test_create_page_courses(self):
"""Integration test for the PagesAPI.create_page_courses method."""
# This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
pass
def test_create_page_groups(self):
"""Integration test for the PagesAPI.create_page_groups method."""
# This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
pass
def test_show_page_courses(self):
"""Integration test for the PagesAPI.show_page_courses method."""
course_id = None # Change me!!
url = None # Change me!!
r = self.client.show_page_courses(course_id, url)
def test_show_page_groups(self):
"""Integration test for the PagesAPI.show_page_groups method."""
group_id = None # Change me!!
url = None # Change me!!
r = self.client.show_page_groups(group_id, url)
def test_update_create_page_courses(self):
"""Integration test for the PagesAPI.update_create_page_courses method."""
# This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration.
pass
def test_update_create_page_groups(self):
"""Integration test for the PagesAPI.update_create_page_groups method."""
# This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration.
pass
def test_delete_page_courses(self):
"""Integration test for the PagesAPI.delete_page_courses method."""
course_id = None # Change me!!
url = None # Change me!!
r = self.client.delete_page_courses(course_id, url)
def test_delete_page_groups(self):
"""Integration test for the PagesAPI.delete_page_groups method."""
group_id = None # Change me!!
url = None # Change me!!
r = self.client.delete_page_groups(group_id, url)
def test_list_revisions_courses(self):
"""Integration test for the PagesAPI.list_revisions_courses method."""
course_id = None # Change me!!
url = None # Change me!!
r = self.client.list_revisions_courses(course_id, url)
def test_list_revisions_groups(self):
"""Integration test for the PagesAPI.list_revisions_groups method."""
group_id = None # Change me!!
url = None # Change me!!
r = self.client.list_revisions_groups(group_id, url)
def test_show_revision_courses_latest(self):
"""Integration test for the PagesAPI.show_revision_courses_latest method."""
course_id = None # Change me!!
url = None # Change me!!
r = self.client.show_revision_courses_latest(course_id, url, summary=None)
def test_show_revision_groups_latest(self):
"""Integration test for the PagesAPI.show_revision_groups_latest method."""
group_id = None # Change me!!
url = None # Change me!!
r = self.client.show_revision_groups_latest(group_id, url, summary=None)
def test_show_revision_courses_revision_id(self):
"""Integration test for the PagesAPI.show_revision_courses_revision_id method."""
course_id = None # Change me!!
url = None # Change me!!
revision_id = None # Change me!!
r = self.client.show_revision_courses_revision_id(
course_id, revision_id, url, summary=None
)
def test_show_revision_groups_revision_id(self):
"""Integration test for the PagesAPI.show_revision_groups_revision_id method."""
group_id = None # Change me!!
url = None # Change me!!
revision_id = None # Change me!!
r = self.client.show_revision_groups_revision_id(
group_id, revision_id, url, summary=None
)
def test_revert_to_revision_courses(self):
"""Integration test for the PagesAPI.revert_to_revision_courses method."""
# This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
pass
def test_revert_to_revision_groups(self):
"""Integration test for the PagesAPI.revert_to_revision_groups method."""
# This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
pass
# ----------------------------------------------------------------------------
# File: src/graph_transpiler/webdnn/frontend/tensorflow/ops/gen_lookup_ops.py
# repo: steerapi/webdnn (MIT)
# ----------------------------------------------------------------------------
import tensorflow as tf
from webdnn.frontend.tensorflow.converter import TensorFlowConverter
@TensorFlowConverter.register_handler("HashTable")
def hash_table_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
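# (All handlers in this module follow the same pattern: the
# TensorFlowConverter.register_handler decorator maps a TensorFlow op type
# name to the function, and each body raises NotImplementedError because
# lookup/table ops are not yet supported by the converter.)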
@TensorFlowConverter.register_handler("HashTableV2")
def hash_table_v2_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("InitializeTable")
def initialize_table_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("InitializeTableFromTextFile")
def initialize_table_from_text_file_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("InitializeTableFromTextFileV2")
def initialize_table_from_text_file_v2_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("InitializeTableV2")
def initialize_table_v2_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("LookupTableExport")
def lookup_table_export_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("LookupTableExportV2")
def lookup_table_export_v2_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("LookupTableFind")
def lookup_table_find_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("LookupTableFindV2")
def lookup_table_find_v2_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("LookupTableImport")
def lookup_table_import_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("LookupTableImportV2")
def lookup_table_import_v2_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("LookupTableInsert")
def lookup_table_insert_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("LookupTableInsertV2")
def lookup_table_insert_v2_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("LookupTableSize")
def lookup_table_size_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("LookupTableSizeV2")
def lookup_table_size_v2_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("MutableDenseHashTable")
def mutable_dense_hash_table_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("MutableDenseHashTableV2")
def mutable_dense_hash_table_v2_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("MutableHashTable")
def mutable_hash_table_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("MutableHashTableOfTensors")
def mutable_hash_table_of_tensors_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("MutableHashTableOfTensorsV2")
def mutable_hash_table_of_tensors_v2_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
@TensorFlowConverter.register_handler("MutableHashTableV2")
def mutable_hash_table_v2_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
# ----------------------------------------------------------------------------
# File: streamer/functions.py -- repo: efikalti/Lambda (Apache-2.0)
# ----------------------------------------------------------------------------
#!/usr/bin/env python
import time
def current_time_to_ms():
return int(time.time() * 1000)
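# Example (illustrative): current_time_to_ms() returns an integer such as
# 1700000000000, i.e. the current Unix time in milliseconds.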
# ----------------------------------------------------------------------------
# File: pyPowerUp/mde.py -- repo: ConorMcNamara/pyPowerUp (MIT)
# ----------------------------------------------------------------------------
from math import sqrt, ceil
from typing import Dict
from pyPowerUp.utils import _mde
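# As used below, _mde(power, alpha, sse, df, two_tailed) is expected to
# return a dict with a 'minimum_detectable_effect' entry and an
# '<(1-alpha)*100>% Confidence Interval' entry (a two-element interval).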
def mde_bcra3f2(
rho2: float,
n: float,
J: float,
K: int,
power: float = 0.80,
alpha: float = 0.10,
two_tailed: bool = True,
p: float = 0.50,
g2: int = 0,
r21: float = 0,
r22: float = 0,
print_pretty: bool = True,
) -> Dict:
"""Calculates the Minimum Detectable Effect of a Three-Level Blocked (Fixed) Cluster-level Random Assignment Design,
Treatment at Level 2
Parameters
----------
rho2: float
Proportion of variance in the outcome between level 2 units (unconditional ICC2)
n: float
Harmonic mean of level 1 units across level 2 units (or simple average)
J: float
Harmonic mean of level 2 units across level 3 units (or simple average)
K: int
Number of level 3 units
power: float, default=0.8
Statistical power of the test.
alpha: float, default=0.10
Probability of Type I error
two_tailed: bool, default=True
Whether our hypothesis is one-tailed or two-tailed
p: float, default=0.5
Average proportion of level 2 units randomly assigned to treatment within level 3 units
g2: int, default=0
Number of covariates at level 2
r21: float, default=0
Proportion of level 1 variance in the outcome explained by level 1 covariates
r22: float, default=0
Proportion of level 2 variance in the outcome explained by level 2 covariates
print_pretty: bool, default=True
Whether we wish to print the results similar to PowerUpR's output
Returns
-------
A dictionary containing the minimum_detectable effect as well as confidence intervals for said effect
"""
df = ceil(K * (J - 2) - g2)
sse = sqrt(
rho2 * (1 - r22) / (p * (1 - p) * J * K)
+ (1 - rho2) * (1 - r21) / (p * (1 - p) * J * K * n)
)
mde = _mde(power, alpha, sse, df, two_tailed)
if print_pretty:
confidence_intervals = [round(i, 3) for i in mde[f'{int((1 - round(alpha, 2)) * 100)}% Confidence Interval']]
str_print = ("Minimum Detectable Effect Size" + "\n" + "-" * 39 + "\n" +
f" {round(mde['minimum_detectable_effect'], 3)} {int((1 - round(alpha, 2)) * 100)}% CI {confidence_intervals}"
+ "\n" + "-" * 39 + "\n" + f"Degrees of Freedom: {df}" + "\n" +
f"Standardized Standard Error: {round(sse, 3)}" + "\n" + f"Type I Error Rate: {round(alpha, 2)}" + "\n" +
f"Type II Error Rate: {round(1 - power, 2)}" + "\n"
+ f"Two-Tailed Test: {two_tailed}"
)
print(str_print)
return mde
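# Illustrative call (parameter values are assumptions chosen only to show the
# interface, not taken from any study):
#   mde_bcra3f2(rho2=0.10, n=20, J=4, K=60, p=0.50, r21=0.30, r22=0.40)
# With these inputs df = 60*(4-2) - 0 = 120, and the returned dict holds the
# MDE in standard-deviation units.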
def mde_bcra3r2(
rho2: float,
rho3: float,
omega3: float,
n: float,
J: float,
K: int,
power: float = 0.80,
alpha: float = 0.10,
two_tailed: bool = True,
p: float = 0.50,
g3: int = 0,
r21: float = 0,
r22: float = 0,
r2t3: float = 0,
print_pretty: bool = True,
) -> Dict:
"""Calculates the Minimum Detectable Effect of a Three-Level Blocked Cluster-level Random Assignment Design,
Treatment at Level 2
Parameters
----------
rho2: float
Proportion of variance in the outcome between level 2 units (unconditional ICC2)
rho3: float
Proportion of variance in the outcome between level 3 units (unconditional ICC3)
omega3: float
Treatment effect heterogeneity as ratio of treatment effect variance among level 3 units to the residual
variance at level 3
n: float
Harmonic mean of level 1 units across level 2 units (or simple average)
J: float
Harmonic mean of level 2 units across level 3 units (or simple average)
K: int
Number of level 3 units
power: float, default=0.8
Statistical power
alpha: float, default=0.1
Probability of Type I error
two_tailed: bool, default=True
Whether our hypothesis is one-tailed or two-tailed
p: float, default=0.5
Average proportion of level 2 units randomly assigned to the treatment within level 3 units
g3: int, default=0
Number of covariates at level 3
r21: float, default=0
Proportion of level 1 variance in the outcome explained by level 1 covariates
r22: float, default=0
Proportion of level 2 variance in the outcome explained by level 2 covariates
r2t3: float, default=0
Proportion of treatment effect variance among level 3 units explained by level 3 covariates
print_pretty: bool, default=True
Whether we wish to print the results similar to PowerUpR's output
Returns
-------
A dictionary containing the minimum_detectable effect as well as confidence intervals for said effect
"""
df = K - g3 - 1
sse = sqrt(
rho3 * omega3 * (1 - r2t3) / K
+ rho2 * (1 - r22) / (p * (1 - p) * J * K)
+ (1 - rho3 - rho2) * (1 - r21) / (p * (1 - p) * J * K * n)
)
mde = _mde(power, alpha, sse, df, two_tailed)
if print_pretty:
confidence_intervals = [round(i, 3) for i in mde[f'{int((1 - round(alpha, 2)) * 100)}% Confidence Interval']]
str_print = ("Minimum Detectable Effect Size" + "\n" + "-" * 39 + "\n" +
f" {round(mde['minimum_detectable_effect'], 3)} {int((1 - round(alpha, 2)) * 100)}% CI {confidence_intervals}"
+ "\n" + "-" * 39 + "\n" + f"Degrees of Freedom: {df}" + "\n" +
f"Standardized Standard Error: {round(sse, 3)}" + "\n" + f"Type I Error Rate: {round(alpha, 2)}" + "\n" +
f"Type II Error Rate: {round(1 - power, 2)}" + "\n"
+ f"Two-Tailed Test: {two_tailed}"
)
print(str_print)
return mde
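# Illustrative call (values are assumptions; note that df = K - g3 - 1 here,
# so power hinges mainly on the number of level 3 units):
#   mde_bcra3r2(rho2=0.10, rho3=0.10, omega3=0.30, n=20, J=4, K=60)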
def mde_bcra4f3(
rho2: float,
rho3: float,
n: float,
J: float,
K: float,
L: int,
power: float = 0.80,
alpha: float = 0.10,
two_tailed: bool = True,
p: float = 0.50,
r21: float = 0,
r22: float = 0,
r23: float = 0,
g3: int = 0,
print_pretty: bool = True,
) -> Dict:
"""Calculates the Minimum Detectable Effect of a Four-Level Blocked (Fixed) Cluster-level Random Assignment Design,
Treatment at Level 3
Parameters
----------
rho2: float
Proportion of variance in the outcome between level 2 units (unconditional ICC2)
rho3: float
Proportion of variance in the outcome between level 3 units (unconditional ICC3)
n: float
Harmonic mean of level 1 units across level 2 units (or simple average)
J: float
Harmonic mean of level 2 units across level 3 units (or simple average)
K: float
Harmonic mean of level 3 units across level 4 units (or simple average)
L: int
Number of level 4 units
power: float, default=0.8
Statistical power
alpha: float, default=0.1
Probability of Type I error
two_tailed: bool, default=True
Whether our hypothesis is one-tailed or two-tailed
p: float, default=0.5
Average proportion of level 3 units randomly assigned to the treatment within level 4 units
r21: float, default=0
Proportion of level 1 variance in the outcome explained by level 1 covariates
r22: float, default=0
Proportion of level 2 variance in the outcome explained by level 2 covariates
r23: float, default=0
Proportion of level 3 variance in the outcome explained by level 3 covariates
g3: int, default=0
Number of covariates at level 3
print_pretty: bool, default=True
Whether we wish to print the results similar to PowerUpR's output
Returns
-------
    A dictionary containing the minimum detectable effect as well as confidence intervals for said effect
"""
df = L * (K - 2) - g3
sse = sqrt(
rho3 * (1 - r23) / (p * (1 - p) * K * L)
+ rho2 * (1 - r22) / (p * (1 - p) * J * K * L)
+ (1 - rho3 - rho2) * (1 - r21) / (p * (1 - p) * J * K * L * n)
)
mde = _mde(power, alpha, sse, df, two_tailed)
if print_pretty:
confidence_intervals = [round(i, 3) for i in mde[f'{int((1 - round(alpha, 2)) * 100)}% Confidence Interval']]
str_print = ("Minimum Detectable Effect Size" + "\n" + "-" * 39 + "\n" +
f" {round(mde['minimum_detectable_effect'], 3)} {int((1 - round(alpha, 2)) * 100)}% CI {confidence_intervals}"
+ "\n" + "-" * 39 + "\n" + f"Degrees of Freedom: {df}" + "\n" +
f"Standardized Standard Error: {round(sse, 3)}" + "\n" + f"Type I Error Rate: {round(alpha, 2)}" + "\n" +
f"Type II Error Rate: {round(1 - power, 2)}" + "\n"
+ f"Two-Tailed Test: {two_tailed}"
)
print(str_print)
return mde
def mde_bcra4r2(
rho2: float,
rho3: float,
rho4: float,
omega3: float,
omega4: float,
    n: float,
    J: float,
    K: float,
L: int,
power: float = 0.80,
alpha: float = 0.10,
two_tailed: bool = True,
p: float = 0.50,
    r21: float = 0.0,
    r22: float = 0.0,
    r2t3: float = 0.0,
    r2t4: float = 0.0,
g4: int = 0,
print_pretty: bool = True,
) -> Dict:
"""Calculates the Minimum Detectable Effect of a Four-Level Blocked Cluster-level Random Assignment Design, Treatment
at Level 2
Parameters
----------
rho2: float
Proportion of variance in the outcome between level 2 units (unconditional ICC2)
rho3: float
Proportion of variance in the outcome between level 3 units (unconditional ICC3)
rho4: float
        Proportion of variance in the outcome between level 4 units (unconditional ICC4)
omega3: float
Treatment effect heterogeneity as ratio of treatment effect variance among level 3 units to the residual
variance at level 3
omega4: float
Treatment effect heterogeneity as ratio of treatment effect variance among level 4 units to the residual
variance at level 4
n: float
Harmonic mean of level 1 units across level 2 units (or simple average)
J: float
Harmonic mean of level 2 units across level 3 units (or simple average)
K: float
Harmonic mean of level 3 units across level 4 units (or simple average)
L: int
Number of level 4 units
power: float, default=0.8
Statistical power
alpha: float, default=0.1
Probability of Type 1 error
two_tailed: bool, default=True
Whether our hypothesis is one tailed or two tailed
p: float, default=0.5
        Average proportion of level 2 units randomly assigned to the treatment within level 3 units
r21: float, default=0
Proportion of level 1 variance in the outcome explained by level 1 covariates
r22: float, default=0
Proportion of level 2 variance in the outcome explained by level 2 covariates
r2t3: float, default=0
Proportion of treatment effect variance among level 3 units explained by level 3 covariates
r2t4: float, default=0
Proportion of treatment effect variance among level 4 units explained by level 4 covariates
    g4: int, default=0
Number of covariates at level 4
print_pretty: bool, default=True
Whether we wish to print the results similar to PowerUpR's output
Returns
-------
    A dictionary containing the minimum detectable effect as well as confidence intervals for said effect
"""
df = L - g4 - 1
sse = sqrt(
rho4 * omega4 * (1 - r2t4) / L
+ rho3 * omega3 * (1 - r2t3) / (K * L)
+ rho2 * (1 - r22) / (p * (1 - p) * J * K * L)
+ (1 - rho4 - rho3 - rho2) * (1 - r21) / (p * (1 - p) * J * K * L * n)
)
mde = _mde(power, alpha, sse, df, two_tailed)
if print_pretty:
confidence_intervals = [round(i, 3) for i in mde[f'{int((1 - round(alpha, 2)) * 100)}% Confidence Interval']]
str_print = ("Minimum Detectable Effect Size" + "\n" + "-" * 39 + "\n" +
f" {round(mde['minimum_detectable_effect'], 3)} {int((1 - round(alpha, 2)) * 100)}% CI {confidence_intervals}"
+ "\n" + "-" * 39 + "\n" + f"Degrees of Freedom: {df}" + "\n" +
f"Standardized Standard Error: {round(sse, 3)}" + "\n" + f"Type I Error Rate: {round(alpha, 2)}" + "\n" +
f"Type II Error Rate: {round(1 - power, 2)}" + "\n"
+ f"Two-Tailed Test: {two_tailed}"
)
print(str_print)
return mde
def mde_bcra4r3(
rho2: float,
rho3: float,
rho4: float,
omega4: float,
n: float,
J: float,
    K: float,
L: int,
power: float = 0.8,
alpha: float = 0.10,
two_tailed: bool = True,
p: float = 0.50,
    r21: float = 0.0,
    r22: float = 0.0,
    r23: float = 0.0,
    r2t4: float = 0.0,
g4: int = 0,
print_pretty: bool = True,
) -> Dict:
"""Calculates the Minimum Detectable Effect of a Four-Level Blocked Cluster-level Random Assignment Design, Treatment
at Level 3
Parameters
----------
rho2: float
Proportion of variance in the outcome between level 2 units (unconditional ICC2)
rho3: float
Proportion of variance in the outcome between level 3 units (unconditional ICC3)
rho4: float
Proportion of variance in the outcome between level 4 units (unconditional ICC4)
omega4: float
Treatment effect heterogeneity as ratio of treatment effect variance among level 4 units to the residual
variance at level 4
n: float
Harmonic mean of level 1 units across level 2 units (or simple average)
J: float
Harmonic mean of level 2 units across level 3 units (or simple average)
K: float
Harmonic mean of level 3 units across level 4 units (or simple average)
L: int
Number of level 4 units
power: float, default=0.8
Statistical power
alpha: float, default=0.1
Probability of Type 1 error
two_tailed: bool, default=True
Whether our hypothesis is one tailed or two tailed
    p: float, default=0.5
Average proportion of level 3 units randomly assigned to treatment within level 4 units
r21: float, default=0
Proportion of level 1 variance in the outcome explained by level 1 covariates
r22: float, default=0
Proportion of level 2 variance in the outcome explained by level 2 covariates
r23: float, default=0
Proportion of level 3 variance in the outcome explained by level 3 covariates
r2t4: float, default=0
Proportion of treatment effect variance among level 4 units explained by level 4 covariates
g4: int, default=0
Number of covariates at level 4
print_pretty: bool, default=True
Whether we wish to print the results similar to PowerUpR's output
Returns
-------
    A dictionary containing the minimum detectable effect as well as confidence intervals for said effect
"""
df = L - g4 - 1
sse = sqrt(
rho4 * omega4 * (1 - r2t4) / L
+ rho3 * (1 - r23) / (p * (1 - p) * K * L)
+ rho2 * (1 - r22) / (p * (1 - p) * J * K * L)
+ (1 - rho4 - rho3 - rho2) * (1 - r21) / (p * (1 - p) * J * K * L * n)
)
mde = _mde(power, alpha, sse, df, two_tailed)
if print_pretty:
confidence_intervals = [round(i, 3) for i in mde[f'{int((1 - round(alpha, 2)) * 100)}% Confidence Interval']]
str_print = ("Minimum Detectable Effect Size" + "\n" + "-" * 39 + "\n" +
f" {round(mde['minimum_detectable_effect'], 3)} {int((1 - round(alpha, 2)) * 100)}% CI {confidence_intervals}"
+ "\n" + "-" * 39 + "\n" + f"Degrees of Freedom: {df}" + "\n" +
f"Standardized Standard Error: {round(sse, 3)}" + "\n" + f"Type I Error Rate: {round(alpha, 2)}" + "\n" +
f"Type II Error Rate: {round(1 - power, 2)}" + "\n"
+ f"Two-Tailed Test: {two_tailed}"
)
print(str_print)
return mde
def mde_bira2c1(
n: float,
J: float,
power: float = 0.80,
alpha: float = 0.10,
two_tailed: bool = True,
p: float = 0.5,
g1: int = 0,
    r21: float = 0.0,
print_pretty: bool = True,
) -> Dict:
"""Calculates the Minimum Detectable Effect of a Two-Level Blocked (Constant Treatment Effect) Individual-level
Random Assignment Design, Treatment at Level 1
Parameters
----------
n: float
Harmonic mean of level 1 units across level 2 units (or simple average)
J: float
        Number of level 2 units (level 2 sample size)
power: float, default=0.8
Statistical power
alpha: float, default=0.1
Probability of Type 1 error
two_tailed: bool, default=True
Whether our hypothesis is one tailed or two tailed
p: float, default=0.5
Average proportion of level 1 units randomly assigned to treatment within level 2 units
g1: int, default=0
Number of covariates at level 1
r21: float, default=0
Proportion of level 1 variance in the outcome explained by level 1 covariates
print_pretty: bool, default=True
Whether we wish to print the results similar to PowerUpR's output
Returns
-------
    A dictionary containing the minimum detectable effect as well as confidence intervals for said effect
"""
df = ceil(J * (n - 1) - g1 - 1)
sse = sqrt((1 - r21) / (p * (1 - p) * J * n))
mde = _mde(power, alpha, sse, df, two_tailed)
if print_pretty:
confidence_intervals = [round(i, 3) for i in mde[f'{int((1 - round(alpha, 2)) * 100)}% Confidence Interval']]
str_print = ("Minimum Detectable Effect Size" + "\n" + "-" * 39 + "\n" +
f" {round(mde['minimum_detectable_effect'], 3)} {int((1 - round(alpha, 2)) * 100)}% CI {confidence_intervals}"
+ "\n" + "-" * 39 + "\n" + f"Degrees of Freedom: {df}" + "\n" +
f"Standardized Standard Error: {round(sse, 3)}" + "\n" + f"Type I Error Rate: {round(alpha, 2)}" + "\n" +
f"Type II Error Rate: {round(1 - power, 2)}" + "\n"
+ f"Two-Tailed Test: {two_tailed}"
)
print(str_print)
return mde
def mde_bira2f1(
    n: float,
    J: float,
power: float = 0.80,
alpha: float = 0.10,
two_tailed: bool = True,
p: float = 0.50,
g1: int = 0,
    r21: float = 0.0,
print_pretty: bool = True,
) -> Dict:
"""Calculates the Minimum Detectable Effect of a Two-Level Blocked (Fixed) Individual-level Random Assignment Design,
Treatment at Level 1
Parameters
----------
n: float
Harmonic mean of level 1 units across level 2 units (or simple average)
J: float
        Number of level 2 units (level 2 sample size)
power: float, default=0.8
Statistical power
alpha: float, default=0.1
Probability of Type 1 error
two_tailed: bool, default=True
Whether our hypothesis is one tailed or two tailed
p: float, default=0.5
Average proportion of level 1 units randomly assigned to treatment within level 2 units
g1: int, default=0
Number of covariates at level 1
r21: float, default=0
Proportion of level 1 variance in the outcome explained by level 1 covariates
print_pretty: bool, default=True
Whether we wish to print the results similar to PowerUpR's output
Returns
-------
    A dictionary containing the minimum detectable effect as well as confidence intervals for said effect
"""
df = J * (n - 2) - g1
sse = sqrt((1 - r21) / (p * (1 - p) * J * n))
mde = _mde(power, alpha, sse, df, two_tailed)
if print_pretty:
confidence_intervals = [round(i, 3) for i in mde[f'{int((1 - round(alpha, 2)) * 100)}% Confidence Interval']]
str_print = ("Minimum Detectable Effect Size" + "\n" + "-" * 39 + "\n" +
f" {round(mde['minimum_detectable_effect'], 3)} {int((1 - round(alpha, 2)) * 100)}% CI {confidence_intervals}"
+ "\n" + "-" * 39 + "\n" + f"Degrees of Freedom: {df}" + "\n" +
f"Standardized Standard Error: {round(sse, 3)}" + "\n" + f"Type I Error Rate: {round(alpha, 2)}" + "\n" +
f"Type II Error Rate: {round(1 - power, 2)}" + "\n"
+ f"Two-Tailed Test: {two_tailed}"
)
print(str_print)
return mde
def mde_bira2r1(
rho2: float,
omega2: float,
n: float,
J: float,
power: float = 0.80,
alpha: float = 0.10,
two_tailed: bool = True,
p: float = 0.50,
g2: int = 0,
    r21: float = 0.0,
    r2t2: float = 0.0,
print_pretty: bool = True,
) -> Dict:
"""Calculates the Minimum Detectable Effect of a Two-Level Blocked Individual-level Random Assignment Design,
Treatment at Level 1
Parameters
----------
rho2: float
Proportion of variance in the outcome between level 2 units (unconditional ICC2)
omega2: float
Treatment effect heterogeneity as ratio of treatment effect variance among level 2 units to the residual
variance at level 2
n: float
Harmonic mean of level 1 units across level 2 units (or simple average)
J: float
        Number of level 2 units (level 2 sample size)
power: float, default=0.8
Statistical power
alpha: float, default=0.1
Probability of Type 1 error
two_tailed: bool, default=True
Whether our hypothesis is one tailed or two tailed
p: float, default=0.5
Average proportion of level 1 units randomly assigned to treatment within level 2 units
g2: int, default=0
Number of covariates at level 2
r21: float, default=0
Proportion of level 1 variance in the outcome explained by level 1 covariates
r2t2: float, default=0
Proportion of treatment effect variance among level 2 units explained by level 2 covariates
print_pretty: bool, default=True
Whether we wish to print the results similar to PowerUpR's output
Returns
-------
    A dictionary containing the minimum detectable effect as well as confidence intervals for said effect
"""
df = ceil(J - g2 - 1)
sse = sqrt(
rho2 * omega2 * (1 - r2t2) / J + (1 - rho2) * (1 - r21) / (p * (1 - p) * J * n)
)
mde = _mde(power, alpha, sse, df, two_tailed)
if print_pretty:
confidence_intervals = [round(i, 3) for i in mde[f'{int((1 - round(alpha, 2)) * 100)}% Confidence Interval']]
str_print = ("Minimum Detectable Effect Size" + "\n" + "-" * 39 + "\n" +
f" {round(mde['minimum_detectable_effect'], 3)} {int((1 - round(alpha, 2)) * 100)}% CI {confidence_intervals}"
+ "\n" + "-" * 39 + "\n" + f"Degrees of Freedom: {df}" + "\n" +
f"Standardized Standard Error: {round(sse, 3)}" + "\n" + f"Type I Error Rate: {round(alpha, 2)}" + "\n" +
f"Type II Error Rate: {round(1 - power, 2)}" + "\n"
+ f"Two-Tailed Test: {two_tailed}"
)
print(str_print)
return mde
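# A minimal usage sketch of mde_bira2r1 (blocked individual-level random
# assignment, e.g. a multisite trial) with illustrative numbers: 30 blocks
# of ~20 individuals, ICC2 = 0.15 and moderate treatment-effect heterogeneity.
_example_bira2r1 = mde_bira2r1(
    rho2=0.15, omega2=0.50, n=20, J=30, print_pretty=False
)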
def mde_bira3r1(
rho2: float,
rho3: float,
omega2: float,
omega3: float,
n: float,
J: float,
K: int,
power: float = 0.8,
alpha: float = 0.10,
two_tailed: bool = True,
p: float = 0.50,
    r21: float = 0.0,
    r2t2: float = 0.0,
    r2t3: float = 0.0,
g3: int = 0,
print_pretty: bool = True,
) -> Dict:
"""Calculates the Minimum Detectable Effect of a
Parameters
----------
rho2: float
Proportion of variance in the outcome between level 2 units (unconditional ICC2)
rho3: float
Proportion of variance in the outcome between level 3 units (unconditional ICC3)
omega2: float
Treatment effect heterogeneity as ratio of treatment effect variance among level 2 units to the residual
variance at level 2
omega3: float
Treatment effect heterogeneity as ratio of treatment effect variance among level 3 units to the residual
variance at level 3
n: float
Harmonic mean of level 1 units across level 2 units (or simple average)
J: float
Harmonic mean of level 2 units across level 3 units (or simple average)
K: int
Number of level 3 units
power: float, default=0.8
Statistical power
alpha: float, default=0.1
Probability of Type 1 error
two_tailed: bool, default=True
Whether our hypothesis is one tailed or two tailed
    p: float, default=0.5
Average proportion of level 1 units randomly assigned to treatment within level 2 units
r21: float, default=0
Proportion of level 1 variance in the outcome explained by level 1 covariates
r2t2: float, default=0
Proportion of treatment effect variance among level 2 units explained by level 2 covariates
r2t3: float, default=0
        Proportion of treatment effect variance among level 3 units explained by level 3 covariates
g3: int, default=0
Number of covariates at level 3
print_pretty: bool, default=True
Whether we wish to print the results similar to PowerUpR's output
Returns
-------
    A dictionary containing the minimum detectable effect as well as confidence intervals for said effect
"""
df = K - g3 - 1
sse = sqrt(
rho3 * omega3 * (1 - r2t3) / K
+ rho2 * omega2 * (1 - r2t2) / (J * K)
+ (1 - rho3 - rho2) * (1 - r21) / (p * (1 - p) * J * K * n)
)
mde = _mde(power, alpha, sse, df, two_tailed)
if print_pretty:
confidence_intervals = [round(i, 3) for i in mde[f'{int((1 - round(alpha, 2)) * 100)}% Confidence Interval']]
str_print = ("Minimum Detectable Effect Size" + "\n" + "-" * 39 + "\n" +
f" {round(mde['minimum_detectable_effect'], 3)} {int((1 - round(alpha, 2)) * 100)}% CI {confidence_intervals}"
+ "\n" + "-" * 39 + "\n" + f"Degrees of Freedom: {df}" + "\n" +
f"Standardized Standard Error: {round(sse, 3)}" + "\n" + f"Type I Error Rate: {round(alpha, 2)}" + "\n" +
f"Type II Error Rate: {round(1 - power, 2)}" + "\n"
+ f"Two-Tailed Test: {two_tailed}"
)
print(str_print)
return mde
def mde_bira4r1(
rho2: float,
rho3: float,
rho4: float,
omega2: float,
omega3: float,
omega4: float,
n: float,
J: float,
    K: float,
L: int,
power: float = 0.80,
alpha: float = 0.10,
two_tailed: bool = True,
p: float = 0.50,
    r21: float = 0.0,
    r2t2: float = 0.0,
    r2t3: float = 0.0,
    r2t4: float = 0.0,
g4: int = 0,
print_pretty: bool = True,
) -> Dict:
"""Calculates the Minimum Detectable Effect of a Four-Level Blocked Individual-level Random Assignment Design,
Treatment at Level 1
Parameters
----------
rho2: float
Proportion of variance in the outcome between level 2 units (unconditional ICC2)
rho3: float
Proportion of variance in the outcome between level 3 units (unconditional ICC3)
rho4: float
Proportion of variance in the outcome between level 4 units (unconditional ICC4)
omega2: float
Treatment effect heterogeneity as ratio of treatment effect variance among level 2 units to the residual
variance at level 2
omega3: float
Treatment effect heterogeneity as ratio of treatment effect variance among level 3 units to the residual
variance at level 3
omega4: float
Treatment effect heterogeneity as ratio of treatment effect variance among level 4 units to the residual
variance at level 4
n: float
Harmonic mean of level 1 units across level 2 units (or simple average)
J: float
Harmonic mean of level 2 units across level 3 units (or simple average)
K: float
Harmonic mean of level 3 units across level 4 units (or simple average)
L: int
Number of level 4 units
power: float, default=0.8
Statistical power
alpha: float, default=0.1
Probability of Type 1 error
two_tailed: bool, default=True
Whether our hypothesis is one tailed or two tailed
p: float, default=0.5
Average proportion of level 1 units randomly assigned to treatment within level 2 units
r21: float, default=0
Proportion of level 1 variance in the outcome explained by level 1 covariates
r2t2: float, default=0
Proportion of treatment effect variance among level 2 units explained by level 2 covariates
r2t3: float, default=0
Proportion of treatment effect variance among level 3 units explained by level 3 covariates
r2t4: float, default=0
Proportion of treatment effect variance among level 4 units explained by level 4 covariates
g4: int, default=0
Number of covariates at level 4
print_pretty: bool, default=True
Whether we wish to print the results similar to PowerUpR's output
Returns
-------
    A dictionary containing the minimum detectable effect as well as confidence intervals for said effect
"""
df = L - g4 - 1
sse = sqrt(
rho4 * omega4 * (1 - r2t4) / L
+ rho3 * omega3 * (1 - r2t3) / (K * L)
+ rho2 * omega2 * (1 - r2t2) / (J * K * L)
+ (1 - rho4 - rho3 - rho2) * (1 - r21) / (p * (1 - p) * J * K * L * n)
)
mde = _mde(power, alpha, sse, df, two_tailed)
if print_pretty:
confidence_intervals = [round(i, 3) for i in mde[f'{int((1 - round(alpha, 2)) * 100)}% Confidence Interval']]
str_print = ("Minimum Detectable Effect Size" + "\n" + "-" * 39 + "\n" +
f" {round(mde['minimum_detectable_effect'], 3)} {int((1 - round(alpha, 2)) * 100)}% CI {confidence_intervals}"
+ "\n" + "-" * 39 + "\n" + f"Degrees of Freedom: {df}" + "\n" +
f"Standardized Standard Error: {round(sse, 3)}" + "\n" + f"Type I Error Rate: {round(alpha, 2)}" + "\n" +
f"Type II Error Rate: {round(1 - power, 2)}" + "\n"
+ f"Two-Tailed Test: {two_tailed}"
)
print(str_print)
return mde
def mde_cra2r2(
rho2: float,
n: float,
J: float,
power: float = 0.80,
alpha: float = 0.05,
two_tailed: bool = True,
p: float = 0.50,
g2: int = 0,
    r21: float = 0.0,
    r22: float = 0.0,
print_pretty: bool = True,
) -> Dict:
"""Calculates the Minimum Detectable Effect of a Two-level Cluster-randomized Trials to Detect Main, Moderation and
Mediation Effects
Parameters
----------
rho2: float
Proportion of variance in the outcome between level 2 units (unconditional ICC2)
n: float
Harmonic mean of level 1 units across level 2 units (or simple average)
J: float
        Number of level 2 units (level 2 sample size)
power: float, default=0.8
Statistical power
    alpha: float, default=0.05
Probability of Type 1 error
two_tailed: bool, default=True
Whether our hypothesis is one tailed or two tailed
p: float, default=0.5
Proportion of level 2 units randomly assigned to treatment
g2: int, default=0
Number of covariates at level 2
r21: float, default=0
Proportion of level 1 variance in the outcome explained by level 1 covariates
r22: float, default=0
Proportion of level 2 variance in the outcome explained by level 2 covariates
print_pretty: bool, default=True
Whether we wish to print the results similar to PowerUpR's output
Returns
-------
    A dictionary containing the minimum detectable effect as well as confidence intervals for said effect
"""
df = ceil(J - g2 - 2)
sse = sqrt(
rho2 * (1 - r22) / (p * (1 - p) * J)
+ (1 - rho2) * (1 - r21) / (p * (1 - p) * J * n)
)
mde = _mde(power, alpha, sse, df, two_tailed)
if print_pretty:
confidence_intervals = [round(i, 3) for i in mde[f'{int((1 - round(alpha, 2)) * 100)}% Confidence Interval']]
str_print = ("Minimum Detectable Effect Size" + "\n" + "-" * 39 + "\n" +
f" {round(mde['minimum_detectable_effect'], 3)} {int((1 - round(alpha, 2)) * 100)}% CI {confidence_intervals}"
+ "\n" + "-" * 39 + "\n" + f"Degrees of Freedom: {df}" + "\n" +
f"Standardized Standard Error: {round(sse, 3)}" + "\n" + f"Type I Error Rate: {round(alpha, 2)}" + "\n" +
f"Type II Error Rate: {round(1 - power, 2)}" + "\n"
+ f"Two-Tailed Test: {two_tailed}"
)
print(str_print)
return mde
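# A minimal usage sketch of mde_cra2r2 with illustrative numbers: 40 clusters
# of ~25 individuals, ICC2 = 0.10, and one level-2 covariate explaining half
# of the between-cluster variance. With the default alpha=0.05 the returned
# dict also carries a '95% Confidence Interval' entry, which is what the
# pretty-printer above reads back.
_example_cra2r2 = mde_cra2r2(
    rho2=0.10, n=25, J=40, g2=1, r22=0.50, print_pretty=False
)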
def mde_cra3r3(
rho2: float,
rho3: float,
n: float,
J: float,
K: int,
power: float = 0.80,
alpha: float = 0.10,
two_tailed: bool = True,
p: float = 0.50,
g3: int = 0,
    r21: float = 0.0,
    r22: float = 0.0,
    r23: float = 0.0,
print_pretty: bool = True,
) -> Dict:
"""Calculates the Minimum Detectable Effect of a Three-level Cluster-randomized Trials to Detect Main, Moderation,
and Mediation Effects
Parameters
----------
rho2: float
Proportion of variance in the outcome between level 2 units (unconditional ICC2)
rho3: float
Proportion of variance in the outcome between level 3 units (unconditional ICC3)
n: float
Harmonic mean of level 1 units across level 2 units (or simple average)
J: float
Harmonic mean of level 2 units across level 3 units (or simple average)
K: int
Level 3 sample size
power: float, default=0.8
Statistical power
alpha: float, default=0.1
Probability of Type 1 error
two_tailed: bool, default=True
Whether our hypothesis is one tailed or two tailed
    p: float, default=0.5
Proportion of level 3 units randomly assigned to treatment
g3: int, default=0
Number of covariates at level 3
r21: float, default=0
Proportion of level 1 variance in the outcome explained by level 1 covariates
r22: float, default=0
Proportion of level 2 variance in the outcome explained by level 2 covariates
r23: float, default=0
Proportion of level 3 variance in the outcome explained by level 3 covariates
print_pretty: bool, default=True
Whether we wish to print the results similar to PowerUpR's output
Returns
-------
    A dictionary containing the minimum detectable effect as well as confidence intervals for said effect
"""
df = K - g3 - 2
sse = sqrt(
rho3 * (1 - r23) / (p * (1 - p) * K)
+ rho2 * (1 - r22) / (p * (1 - p) * J * K)
+ (1 - rho3 - rho2) * (1 - r21) / (p * (1 - p) * J * K * n)
)
mde = _mde(power, alpha, sse, df, two_tailed)
if print_pretty:
confidence_intervals = [round(i, 3) for i in mde[f'{int((1 - round(alpha, 2)) * 100)}% Confidence Interval']]
str_print = ("Minimum Detectable Effect Size" + "\n" + "-" * 39 + "\n" +
f" {round(mde['minimum_detectable_effect'], 3)} {int((1 - round(alpha, 2)) * 100)}% CI {confidence_intervals}"
+ "\n" + "-" * 39 + "\n" + f"Degrees of Freedom: {df}" + "\n" +
f"Standardized Standard Error: {round(sse, 3)}" + "\n" + f"Type I Error Rate: {round(alpha, 2)}" + "\n" +
f"Type II Error Rate: {round(1 - power, 2)}" + "\n"
+ f"Two-Tailed Test: {two_tailed}"
)
print(str_print)
return mde
def mde_cra4r4(
rho2: float,
rho3: float,
rho4: float,
n: float,
J: float,
K: float,
L: int,
power: float = 0.80,
alpha: float = 0.10,
two_tailed: bool = True,
p: float = 0.50,
r21: float = 0,
r22: float = 0,
r23: float = 0,
r24: float = 0,
g4: int = 0,
print_pretty: bool = True,
) -> Dict:
"""Calculates the Minimum Detectable Effect of a Four-Level Cluster-randomized Trial
Parameters
----------
rho2: float
Proportion of variance in the outcome between level 2 units (unconditional ICC2)
rho3: float
Proportion of variance in the outcome between level 3 units (unconditional ICC3)
rho4: float
Proportion of variance in the outcome between level 4 units (unconditional ICC4)
n: float
Harmonic mean of level 1 units across level 2 units (or simple average)
J: float
Harmonic mean of level 2 units across level 3 units (or simple average)
K: float
Harmonic mean of level 3 units across level 4 units (or simple average)
L: int
Number of level 4 units
power: float, default=0.8
Statistical power
alpha: float, default=0.1
Probability of Type 1 error
two_tailed: bool, default=True
Whether our hypothesis is one tailed or two tailed
p: float, default=0.5
Proportion of level 4 units randomly assigned to treatment
r21: float, default=0
Proportion of level 1 variance in the outcome explained by level 1 covariates
r22: float, default=0
Proportion of level 2 variance in the outcome explained by level 2 covariates
r23: float, default=0
Proportion of level 3 variance in the outcome explained by level 3 covariates
r24: float, default=0
Proportion of level 4 variance in the outcome explained by level 4 covariates
g4: int, default=0
Number of covariates at level 4
print_pretty: bool, default=True
Whether we wish to print the results similar to PowerUpR's output
Returns
-------
    A dictionary containing the minimum detectable effect as well as confidence intervals for said effect
"""
df = L - g4 - 2
sse = sqrt(
rho4 * (1 - r24) / (p * (1 - p) * L)
+ rho3 * (1 - r23) / (p * (1 - p) * K * L)
+ rho2 * (1 - r22) / (p * (1 - p) * J * K * L)
+ (1 - rho4 - rho3 - rho2) * (1 - r21) / (p * (1 - p) * J * K * L * n)
)
mde = _mde(power, alpha, sse, df, two_tailed)
if print_pretty:
confidence_intervals = [round(i, 3) for i in mde[f'{int((1 - round(alpha, 2)) * 100)}% Confidence Interval']]
str_print = ("Minimum Detectable Effect Size" + "\n" + "-" * 39 + "\n" +
f" {round(mde['minimum_detectable_effect'], 3)} {int((1 - round(alpha, 2)) * 100)}% CI {confidence_intervals}"
+ "\n" + "-" * 39 + "\n" + f"Degrees of Freedom: {df}" + "\n" +
f"Standardized Standard Error: {round(sse, 3)}" + "\n" + f"Type I Error Rate: {round(alpha, 2)}" + "\n" +
f"Type II Error Rate: {round(1 - power, 2)}" + "\n"
+ f"Two-Tailed Test: {two_tailed}"
)
print(str_print)
return mde
def mde_ira1r1(
n: int,
power: float = 0.80,
alpha: float = 0.10,
two_tailed: bool = True,
p: float = 0.50,
g1: int = 0,
r21: float = 0,
print_pretty: bool = True,
) -> Dict:
"""Calculates the Minimum Detectable Effect of a Individual-level Random Assignment Design
Parameters
----------
n: int
Sample size
power: float, default=0.8
Statistical power
alpha: float, default=0.1
Probability of Type 1 error
two_tailed: bool, default=True
Whether our hypothesis is one tailed or two tailed
    p: float, default=0.5
Proportion of units randomly assigned to treatment
g1: int, default=0
Number of covariates
r21: float, default=0
Proportion of variance in the outcome explained by covariates
print_pretty: bool, default=True
Whether we wish to print the results similar to PowerUpR's output
Returns
-------
    A dictionary containing the minimum detectable effect as well as confidence intervals for said effect
"""
df = n - g1 - 2
sse = sqrt((1 - r21) / (p * (1 - p) * n))
mde = _mde(power, alpha, sse, df, two_tailed)
if print_pretty:
confidence_intervals = [round(i, 3) for i in mde[f'{int((1 - round(alpha, 2)) * 100)}% Confidence Interval']]
str_print = ("Minimum Detectable Effect Size" + "\n" + "-" * 39 + "\n" +
f" {round(mde['minimum_detectable_effect'], 3)} {int((1 - round(alpha, 2)) * 100)}% CI {confidence_intervals}"
+ "\n" + "-" * 39 + "\n" + f"Degrees of Freedom: {df}" + "\n" +
f"Standardized Standard Error: {round(sse, 3)}" + "\n" + f"Type I Error Rate: {round(alpha, 2)}" + "\n" +
f"Type II Error Rate: {round(1 - power, 2)}" + "\n"
+ f"Two-Tailed Test: {two_tailed}"
)
print(str_print)
return mde
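# A minimal usage sketch of mde_ira1r1 with illustrative numbers: a simple
# randomized experiment on 400 units with a covariate explaining 20% of the
# outcome variance.
_example_ira1r1 = mde_ira1r1(n=400, alpha=0.05, r21=0.20, print_pretty=False)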
| 39.727009 | 131 | 0.596667 | 5,707 | 41,038 | 4.251095 | 0.030839 | 0.030337 | 0.042331 | 0.041218 | 0.980627 | 0.973826 | 0.961832 | 0.958534 | 0.957463 | 0.949425 | 0 | 0.047205 | 0.300015 | 41,038 | 1,032 | 132 | 39.765504 | 0.797361 | 0.503996 | 0 | 0.849785 | 0 | 0.030043 | 0.293931 | 0.047458 | 0 | 0 | 0 | 0 | 0 | 1 | 0.030043 | false | 0 | 0.006438 | 0 | 0.066524 | 0.120172 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d7b68ebbab14b1f629447184818e0bd2f75ddd4b | 1,059 | py | Python | tests/test_newhash.py | bsd-ac/obsd_crypt | 74ef4ed53e6118a5911eb463cde91346515ef9c2 | [
"ISC"
] | null | null | null | tests/test_newhash.py | bsd-ac/obsd_crypt | 74ef4ed53e6118a5911eb463cde91346515ef9c2 | [
"ISC"
] | null | null | null | tests/test_newhash.py | bsd-ac/obsd_crypt | 74ef4ed53e6118a5911eb463cde91346515ef9c2 | [
"ISC"
] | 1 | 2022-03-01T22:46:27.000Z | 2022-03-01T22:46:27.000Z | from random import choices
from string import ascii_uppercase, digits
from obsd_crypt import crypt_checkpass, crypt_newhash
def test_newhash_default():
pass_len = 24
for i in range(15):
password = ''.join(choices(ascii_uppercase + digits, k=pass_len))
password_hash = crypt_newhash(password)
assert (password_hash)
print(password_hash)
assert (crypt_checkpass(password, password_hash))
def test_newhash_neg():
pass_len = 24
for i in range(15):
password = ''.join(choices(ascii_uppercase + digits, k=pass_len))
password_hash = crypt_newhash(password, -1)
assert (password_hash)
print(password_hash)
assert (crypt_checkpass(password, password_hash))
def test_newhash_pos():
pass_len = 24
for i in range(15):
password = ''.join(choices(ascii_uppercase + digits, k=pass_len))
password_hash = crypt_newhash(password, 10)
assert (password_hash)
print(password_hash)
assert (crypt_checkpass(password, password_hash))
| 32.090909 | 73 | 0.686497 | 134 | 1,059 | 5.149254 | 0.246269 | 0.208696 | 0.115942 | 0.052174 | 0.805797 | 0.805797 | 0.805797 | 0.805797 | 0.805797 | 0.805797 | 0 | 0.018248 | 0.223796 | 1,059 | 32 | 74 | 33.09375 | 0.821168 | 0 | 0 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.222222 | 1 | 0.111111 | false | 0.703704 | 0.111111 | 0 | 0.222222 | 0.111111 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
cc301f5be91cfd4745ed2cd3302b3952dd6eabdc | 165,583 | py | Python | fffbnf_parser.py | thautwarm/frontend-for-free | 77189c1eee3f18080fe7464f3696b1bce44e9a1b | [
"BSD-3-Clause"
] | 28 | 2020-02-05T03:16:17.000Z | 2021-12-28T09:17:50.000Z | fffbnf_parser.py | thautwarm/frontend-for-free | 77189c1eee3f18080fe7464f3696b1bce44e9a1b | [
"BSD-3-Clause"
] | 1 | 2020-10-23T09:31:03.000Z | 2020-10-23T09:31:03.000Z | fffbnf_parser.py | thautwarm/frontend-for-free | 77189c1eee3f18080fe7464f3696b1bce44e9a1b | [
"BSD-3-Clause"
] | 1 | 2020-07-20T06:41:58.000Z | 2020-07-20T06:41:58.000Z |
"""
Copyright thautwarm (c) 2019
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of thautwarm nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from fffbnf_lex import lexer
from json import dumps
from collections import defaultdict
from io import StringIO
import warnings
import pathlib
import re
import sys
import attr
def unesc(s):
return eval(s)
@attr.s(hash=True)
class Include:
lang = attr.ib()
files = attr.ib()
@attr.s(hash=True)
class Params:
params = attr.ib(converter=tuple)
@attr.s(hash=True)
class Terminal:
kind = attr.ib()
value = attr.ib()
LITERAL = "LITERAL"
SYMBOL = "SYMBOL"
@attr.s(hash=True)
class NonTerminal:
name = attr.ib()
@attr.s(hash=True)
class Optional:
rule = attr.ib()
@attr.s(hash=True)
class MacroUse:
name = attr.ib()
args = attr.ib(converter=tuple)
@attr.s(hash=True)
class Bind:
name = attr.ib()
atom = attr.ib()
@attr.s(hash=True)
class Rewrite:
rule = attr.ib()
action = attr.ib()
Define = object()
Alias = object()
def _cond_conv(x):
if isinstance(x, list):
return tuple(x)
return x
@attr.s(hash=True)
class MacroDef:
name = attr.ib()
args = attr.ib(converter=tuple)
alts = attr.ib(converter=_cond_conv)
@attr.s(hash=True)
class Def:
name = attr.ib()
alts = attr.ib(converter=_cond_conv)
@attr.s(hash=True)
class Call:
f = attr.ib()
args = attr.ib(converter=tuple)
@attr.s(hash=True)
class Var:
name = attr.ib()
@attr.s(hash=True)
class Int:
i = attr.ib()
@attr.s(hash=True)
class List:
elts = attr.ib(converter=tuple)
@attr.s(hash=True)
class Attr:
subject = attr.ib()
attr = attr.ib()
@attr.s(hash=True)
class Stmts:
suite = attr.ib(converter=tuple)
def maybeStmts(suite):
if len(suite) == 1:
return suite[0]
return Stmts(suite)
@attr.s(hash=True)
class Tuple:
elts = attr.ib(converter=tuple)
def maybeTuple(elts):
    if len(elts) == 1:
return elts[0]
return Tuple(elts)
@attr.s(hash=True)
class Seq:
ps = attr.ib(converter=tuple)
@attr.s(hash=True)
class Alt:
ps = attr.ib(converter=tuple)
def seq(xs):
    if len(xs) == 1:
return xs[0]
return Seq(xs)
def alt(xs):
    if len(xs) == 1:
return xs[0]
return Alt(xs)
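# A quick sanity sketch of the flattening helpers above: one-element
# sequences/alternatives collapse to their single member, longer ones wrap.
assert seq([Var("x")]) == Var("x")
assert seq([Var("x"), Var("y")]) == Seq([Var("x"), Var("y")])
assert alt([Var("x"), Var("y")]) == Alt([Var("x"), Var("y")])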
@attr.s(hash=True)
class Slot:
i = attr.ib()
class Interpreter:
dispatches = {}
def __init__(self, **scope):
self.scope = scope
self.macros = {}
self.macro_use_cache = {}
self.expanded_rules = []
self.includes = []
self.params = []
def sub(self, **scope):
it = Interpreter(**scope)
it.macros = self.macros
it.macro_use_cache = self.macro_use_cache
it.expanded_rules = self.expanded_rules
it.includes = self.includes
return it
def eval(self, x):
return self.dispatches[type(x)](self, x)
@classmethod
def build(cls, xs):
top = cls()
io = StringIO()
print("[", file=io)
for each in xs:
top.eval(each)
print("\n, ".join(top.expanded_rules), file=io)
print("]", file=io)
return io.getvalue(), top.includes, top.params
def v_Include(self, x: Include):
self.includes.append((x.lang, x.files))
dispatches[Include] = v_Include
def v_Params(self, x: Params):
self.params = x.params
dispatches[Params] = v_Params
def v_Terminal(self, x: Terminal):
if x.kind is LITERAL:
lit = dumps(f"quote {x.value}")
elif x.kind is SYMBOL:
lit = dumps(x.value)
else:
raise Exception
return f"CTerm {lit}"
dispatches[Terminal] = v_Terminal
def v_NonTerminal(self, x: NonTerminal):
if x.name in self.scope:
return self.scope[x.name]
return f"CNonTerm {dumps(x.name)}"
dispatches[NonTerminal] = v_NonTerminal
def v_Optional(self, x: Optional):
return f"COpt ({self.eval(x.rule)})"
dispatches[Optional] = v_Optional
def v_Bind(self, x: Bind):
return f"CBind {dumps(x.name)} ({self.eval(x.atom)})"
dispatches[Bind] = v_Bind
def v_Rewrite(self, x: Rewrite):
if x.action:
action = f"Just ({self.eval(x.action)})"
else:
action = "Nothing"
return self.eval(x.rule), action
dispatches[Rewrite] = v_Rewrite
def v_Def(self, x: Def):
alts = x.alts
name = x.name
if isinstance(alts, tuple):
alts = map(self.eval, alts)
return self.expanded_rules.extend(
f"({dumps(name)}, ({alt}), {action})" for alt, action in alts
)
else:
expr = self.eval(alts)
return self.expanded_rules.append(
f"({dumps(name)}, ({expr}), Just (MSlot 1))"
)
dispatches[Def] = v_Def
def v_Seq(self, x: Seq):
ps = ", ".join(f"({self.eval(p)})" for p in x.ps)
return f"CSeq [{ps}]"
def v_Alt(self, x: Alt):
ps = ", ".join(f"({self.eval(p)})" for p in x.ps)
return f"CAlt [{ps}]"
dispatches[Seq] = v_Seq
dispatches[Alt] = v_Alt
def v_Call(self, x: Call):
f = self.eval(x.f)
args = (f"({self.eval(a)})" for a in x.args)
return f'MApp ({f}) [{", ".join(args)}]'
dispatches[Call] = v_Call
def v_Int(self, x: Int):
return f"MInt {x.i}"
dispatches[Int] = v_Int
def v_Slot(self, x: Int):
return f"MSlot {x.i}"
dispatches[Slot] = v_Slot
def v_Var(self, x: Var):
return f"MTerm {dumps(x.name)}"
dispatches[Var] = v_Var
def v_Tuple(self, x: Tuple):
args = (f"({self.eval(a)})" for a in x.elts)
return f'MTuple [{", ".join(args)}]'
dispatches[Tuple] = v_Tuple
def v_List(self, x: List):
args = list(f"({self.eval(a)})" for a in x.elts)
r = 'MApp (MBuiltin "empty_list") []'
f = 'MBuiltin "push_list"'
for a in args:
r = f"MApp ({f}) [{r}, {a}]"
return r
dispatches[List] = v_List
def v_Attr(self, x: Attr):
return 'MAttr ({}) {}'.format(self.eval(x.subject), dumps(x.attr))
dispatches[Attr] = v_Attr
def v_Stmts(self, x: Stmts):
args = list(f"({self.eval(a)})" for a in x.suite)
if len(args) == 1:
return args[0]
return "MCombine [%s]" % (', '.join(args))
dispatches[Stmts] = v_Stmts
def v_MacroDef(self, x: MacroDef):
self.macros[x.name] = x
return []
dispatches[MacroDef] = v_MacroDef
def v_MacroUse(self, x: MacroUse):
macro = self.macros[x.name]
params = tuple(map(self.eval, x.args))
assert len(params) == len(
macro.args
), f"filling macro {x.name}'s parameter incorrectly."
ident = (macro, params)
n = self.macro_use_cache.get(ident, None)
if n is None:
n = f"rbnfmacro_{len(self.macro_use_cache)}"
self.macro_use_cache[ident] = n
it = self.sub(**dict(zip(macro.args, params)))
it.eval(Def(n, macro.alts))
return f"CNonTerm {dumps(n)}"
dispatches[MacroUse] = v_MacroUse
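# A minimal sketch of driving the Interpreter directly with hand-built AST
# nodes, bypassing the lexer/parser; the one-rule grammar below is purely
# illustrative. Interpreter.build returns the readable rule dump plus any
# includes and params collected along the way.
_demo_readable, _demo_includes, _demo_params = Interpreter.build(
    [Def("start", [Rewrite(Terminal(LITERAL, "a"), None)])]
)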
def _find_n(s: str, ch, n: int):
    # Return the n-th `ch`-delimited field of `s` (1-based): skip past the
    # first n-1 occurrences of `ch`, then slice up to the next one.
    since = 0
    for i in range(0, n - 1):
        since = s.find(ch, since) + 1
    return s[since : s.find(ch, since)]
def parse(text: str, filename: str = "unknown"):
_parse = mk_parser()
tokens = lexer(filename, text)
res = _parse(State(), Tokens(tokens))
if res[0]:
return res[1]
msgs = []
assert res[1]
maxline = 0
for each in res[1]:
i, msg = each
token = tokens[i]
lineno = token.lineno
maxline = max(lineno, maxline)
colno = token.colno
msgs.append(f"Line {lineno + 1}, column {colno}, {msg}")
e = SyntaxError()
e.lineno = maxline + 1
e.msg = "\n".join(msgs)
e.filename = filename
off = token.offset
e.offset = off
e.text = text[: text.find("\n", off)]
raise e
def build(filename: str, out_req: str, out_ff: str, lang: str = "python", parseronly: bool=False):
with open(filename) as f:
text = f.read()
readable, includes, params = Interpreter.build(parse(text, filename=filename))
if not parseronly:
parent_dir = pathlib.Path(filename).parent
with open(out_req, "w") as f:
for required_lang, files in includes:
if required_lang is None or required_lang == lang:
pass
else:
continue
for include in files:
if isinstance(include, tuple):
[include, _] = include
f.write(include[len("%%inline"):-len("%%")])
continue
include = parent_dir / include
try:
with include.open() as r:
f.write(r.read())
f.write("\n")
except FileNotFoundError:
warnings.warn(f"{include} not found")
with open(out_ff, "w") as f:
f.write(readable)
return params
def entry():
from wisepy2 import wise
wise(build)()
from json.decoder import py_scanstring
def unesc(s, py_scanstring=py_scanstring):
if s[0] == '"':
return py_scanstring(s, 1)[0]
return eval(s)
def ListUse(t):
return MacroUse("list", [t])
from typing import Generic, TypeVar
T = TypeVar('T')
class Tokens():
__slots__ = ['array', 'offset']
def __init__(self, array):
self.array = array
self.offset = 0
class State():
def __init__(self):
pass
class AST(Generic[T]):
__slots__ = ['tag', 'contents']
def __init__(self, tag: str, contents: T):
self.tag = tag
self.contents = contents
class Nil():
nil = None
__slots__ = []
def __init__(self):
if (Nil.nil is None):
Nil.nil = self
return
raise ValueError('Nil cannot get instantiated twice.')
def __len__(self):
return 0
def __getitem__(self, n):
raise IndexError('Out of bounds')
@property
def head(self):
raise IndexError('Out of bounds')
@property
def tail(self):
raise IndexError('Out of bounds')
def __repr__(self):
return '[]'
_nil = Nil()
class Cons():
__slots__ = ['head', 'tail']
def __init__(self, _head, _tail):
self.head = _head
self.tail = _tail
def __len__(self):
nil = _nil
l = 0
while (self is not nil):
l += 1
self = self.tail
return l
def __iter__(self):
nil = _nil
while (self is not nil):
(yield self.head)
self = self.tail
def __getitem__(self, n):
while (n != 0):
self = self.tail
n -= 1
return self.head
def __repr__(self):
return repr(list(self))
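# A quick sketch of the cons-list helpers above: build the list [1, 2] and
# exercise iteration, indexing and length.
_demo_cons = Cons(1, Cons(2, _nil))
assert list(_demo_cons) == [1, 2]
assert len(_demo_cons) == 2 and _demo_cons[1] == 2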
try:
def mk_pretty():
from prettyprinter import register_pretty, pretty_call, pprint
@register_pretty(Tokens)
def pretty_tokens(value, ctx):
return pretty_call(ctx, Tokens, offset=value.offset, array=value.array)
@register_pretty(AST)
def pretty_ast(value, ctx):
return pretty_call(ctx, AST, tag=value.tag, contents=value.contents)
mk_pretty()
del mk_pretty
except ImportError:
pass
del T, Generic, TypeVar
builtin_cons = Cons
builtin_nil = _nil
builtin_mk_ast = AST
def mk_parser():
pass
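# The rbnf_named_lr_step_* / rbnf_named_lr_loop_* pairs below implement
# left-recursion elimination: a non-left-recursive seed parse supplies
# rbnf_tmp_0, then the matching step function is applied repeatedly, each
# success extending the reduction, until a step fails; the loop succeeds
# with the last reduction as long as the failing step consumed no further
# tokens (token offset unchanged), and otherwise propagates that failure.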
def rbnf_named_lr_step_lang(rbnf_tmp_0, builtin_state, builtin_tokens):
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint is 5):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_0 = _rbnf_cur_token
rbnf_tmp_1 = lcl_0
lcl_0 = (rbnf_tmp_1 is None)
if lcl_0:
lcl_1 = builtin_tokens.offset
lcl_1 = (lcl_1, 'quote ( not match')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
else:
lcl_1 = builtin_tokens.offset
rbnf_named__off_1 = lcl_1
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_1 = _rbnf_peek_tmp
if lcl_1:
lcl_3 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_3 = lcl_3.idint
if (lcl_3 == 7):
lcl_4 = rbnf_named_parse_lang_lst(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_4
lcl_4 = rbnf_named__check_2[0]
lcl_4 = (lcl_4 == False)
if lcl_4:
lcl_4 = rbnf_named__check_2
else:
lcl_5 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_5
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint is 6):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_5 = _rbnf_cur_token
rbnf_tmp_3 = lcl_5
lcl_5 = (rbnf_tmp_3 is None)
if lcl_5:
lcl_6 = builtin_tokens.offset
lcl_6 = (lcl_6, 'quote ) not match')
lcl_6 = builtin_cons(lcl_6, builtin_nil)
lcl_6 = (False, lcl_6)
lcl_5 = lcl_6
else:
lcl_6 = Call(rbnf_tmp_0, rbnf_tmp_2)
rbnf_tmp_1_ = lcl_6
lcl_6 = (True, rbnf_tmp_1_)
lcl_5 = lcl_6
lcl_4 = lcl_5
lcl_2 = lcl_4
elif (lcl_3 == 6):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_4 = _rbnf_cur_token
rbnf_tmp_2 = lcl_4
lcl_4 = []
lcl_4 = Call(rbnf_tmp_0, lcl_4)
rbnf_tmp_1_ = lcl_4
lcl_4 = (True, rbnf_tmp_1_)
lcl_2 = lcl_4
elif (lcl_3 == 5):
lcl_4 = rbnf_named_parse_lang_lst(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_4
lcl_4 = rbnf_named__check_2[0]
lcl_4 = (lcl_4 == False)
if lcl_4:
lcl_4 = rbnf_named__check_2
else:
lcl_5 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_5
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint is 6):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_5 = _rbnf_cur_token
rbnf_tmp_3 = lcl_5
lcl_5 = (rbnf_tmp_3 is None)
if lcl_5:
lcl_6 = builtin_tokens.offset
lcl_6 = (lcl_6, 'quote ) not match')
lcl_6 = builtin_cons(lcl_6, builtin_nil)
lcl_6 = (False, lcl_6)
lcl_5 = lcl_6
else:
lcl_6 = Call(rbnf_tmp_0, rbnf_tmp_2)
rbnf_tmp_1_ = lcl_6
lcl_6 = (True, rbnf_tmp_1_)
lcl_5 = lcl_6
lcl_4 = lcl_5
lcl_2 = lcl_4
elif (lcl_3 == 22):
lcl_4 = rbnf_named_parse_lang_lst(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_4
lcl_4 = rbnf_named__check_2[0]
lcl_4 = (lcl_4 == False)
if lcl_4:
lcl_4 = rbnf_named__check_2
else:
lcl_5 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_5
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint is 6):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_5 = _rbnf_cur_token
rbnf_tmp_3 = lcl_5
lcl_5 = (rbnf_tmp_3 is None)
if lcl_5:
lcl_6 = builtin_tokens.offset
lcl_6 = (lcl_6, 'quote ) not match')
lcl_6 = builtin_cons(lcl_6, builtin_nil)
lcl_6 = (False, lcl_6)
lcl_5 = lcl_6
else:
lcl_6 = Call(rbnf_tmp_0, rbnf_tmp_2)
rbnf_tmp_1_ = lcl_6
lcl_6 = (True, rbnf_tmp_1_)
lcl_5 = lcl_6
lcl_4 = lcl_5
lcl_2 = lcl_4
elif (lcl_3 == 21):
lcl_4 = rbnf_named_parse_lang_lst(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_4
lcl_4 = rbnf_named__check_2[0]
lcl_4 = (lcl_4 == False)
if lcl_4:
lcl_4 = rbnf_named__check_2
else:
lcl_5 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_5
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint is 6):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_5 = _rbnf_cur_token
rbnf_tmp_3 = lcl_5
lcl_5 = (rbnf_tmp_3 is None)
if lcl_5:
lcl_6 = builtin_tokens.offset
lcl_6 = (lcl_6, 'quote ) not match')
lcl_6 = builtin_cons(lcl_6, builtin_nil)
lcl_6 = (False, lcl_6)
lcl_5 = lcl_6
else:
lcl_6 = Call(rbnf_tmp_0, rbnf_tmp_2)
rbnf_tmp_1_ = lcl_6
lcl_6 = (True, rbnf_tmp_1_)
lcl_5 = lcl_6
lcl_4 = lcl_5
lcl_2 = lcl_4
elif (lcl_3 == 1):
lcl_4 = rbnf_named_parse_lang_lst(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_4
lcl_4 = rbnf_named__check_2[0]
lcl_4 = (lcl_4 == False)
if lcl_4:
lcl_4 = rbnf_named__check_2
else:
lcl_5 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_5
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint is 6):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_5 = _rbnf_cur_token
rbnf_tmp_3 = lcl_5
lcl_5 = (rbnf_tmp_3 is None)
if lcl_5:
lcl_6 = builtin_tokens.offset
lcl_6 = (lcl_6, 'quote ) not match')
lcl_6 = builtin_cons(lcl_6, builtin_nil)
lcl_6 = (False, lcl_6)
lcl_5 = lcl_6
else:
lcl_6 = Call(rbnf_tmp_0, rbnf_tmp_2)
rbnf_tmp_1_ = lcl_6
lcl_6 = (True, rbnf_tmp_1_)
lcl_5 = lcl_6
lcl_4 = lcl_5
lcl_2 = lcl_4
else:
lcl_4 = (rbnf_named__off_1, 'lang lookahead failed')
lcl_4 = builtin_cons(lcl_4, builtin_nil)
lcl_4 = (False, lcl_4)
lcl_2 = lcl_4
lcl_1 = lcl_2
else:
lcl_2 = (rbnf_named__off_1, 'lang got EOF')
lcl_2 = builtin_cons(lcl_2, builtin_nil)
lcl_2 = (False, lcl_2)
lcl_1 = lcl_2
lcl_0 = lcl_1
return lcl_0
def rbnf_named_lr_loop_lang(rbnf_tmp_0, builtin_state, builtin_tokens):
rbnf_named_lr_lang_reduce = rbnf_tmp_0
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
lcl_0 = rbnf_named_lr_step_lang(rbnf_named_lr_lang_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_lang_try = lcl_0
lcl_0 = rbnf_named_lr_lang_try[0]
lcl_0 = (lcl_0 is not False)
while lcl_0:
lcl_1 = builtin_tokens.offset
rbnf_named__off_0 = lcl_1
lcl_1 = rbnf_named_lr_lang_try[1]
rbnf_named_lr_lang_reduce = lcl_1
lcl_1 = rbnf_named_lr_step_lang(rbnf_named_lr_lang_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_lang_try = lcl_1
lcl_1 = rbnf_named_lr_lang_try[0]
lcl_1 = (lcl_1 is not False)
lcl_0 = lcl_1
lcl_0 = builtin_tokens.offset
lcl_0 = (lcl_0 == rbnf_named__off_0)
if lcl_0:
lcl_1 = (True, rbnf_named_lr_lang_reduce)
lcl_0 = lcl_1
else:
lcl_0 = rbnf_named_lr_lang_try
return lcl_0
def rbnf_named_lr_step_lang_atom(rbnf_tmp_0, builtin_state, builtin_tokens):
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint is 23):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_0 = _rbnf_cur_token
rbnf_tmp_1 = lcl_0
lcl_0 = (rbnf_tmp_1 is None)
if lcl_0:
lcl_1 = builtin_tokens.offset
lcl_1 = (lcl_1, 'quote . not match')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
else:
lcl_1 = rbnf_named_parse_Ident(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_1
lcl_1 = rbnf_named__check_2[0]
lcl_1 = (lcl_1 == False)
if lcl_1:
lcl_1 = rbnf_named__check_2
else:
lcl_2 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_2
lcl_2 = Attr(rbnf_tmp_0, rbnf_tmp_2)
rbnf_tmp_1_ = lcl_2
lcl_2 = (True, rbnf_tmp_1_)
lcl_1 = lcl_2
lcl_0 = lcl_1
return lcl_0
def rbnf_named_lr_loop_lang_atom(rbnf_tmp_0, builtin_state, builtin_tokens):
rbnf_named_lr_lang_atom_reduce = rbnf_tmp_0
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
lcl_0 = rbnf_named_lr_step_lang_atom(rbnf_named_lr_lang_atom_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_lang_atom_try = lcl_0
lcl_0 = rbnf_named_lr_lang_atom_try[0]
lcl_0 = (lcl_0 is not False)
while lcl_0:
lcl_1 = builtin_tokens.offset
rbnf_named__off_0 = lcl_1
lcl_1 = rbnf_named_lr_lang_atom_try[1]
rbnf_named_lr_lang_atom_reduce = lcl_1
lcl_1 = rbnf_named_lr_step_lang_atom(rbnf_named_lr_lang_atom_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_lang_atom_try = lcl_1
lcl_1 = rbnf_named_lr_lang_atom_try[0]
lcl_1 = (lcl_1 is not False)
lcl_0 = lcl_1
lcl_0 = builtin_tokens.offset
lcl_0 = (lcl_0 == rbnf_named__off_0)
if lcl_0:
lcl_1 = (True, rbnf_named_lr_lang_atom_reduce)
lcl_0 = lcl_1
else:
lcl_0 = rbnf_named_lr_lang_atom_try
return lcl_0
def rbnf_named_lr_step_rbnfmacro_0(rbnf_tmp_0, builtin_state, builtin_tokens):
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint is 0):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_0 = _rbnf_cur_token
rbnf_tmp_1 = lcl_0
lcl_0 = (rbnf_tmp_1 is None)
if lcl_0:
lcl_1 = builtin_tokens.offset
lcl_1 = (lcl_1, 'quote , not match')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
else:
lcl_1 = rbnf_named_parse_expr(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_1
lcl_1 = rbnf_named__check_2[0]
lcl_1 = (lcl_1 == False)
if lcl_1:
lcl_1 = rbnf_named__check_2
else:
lcl_2 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_2
lcl_2 = rbnf_tmp_0.append
lcl_2 = lcl_2(rbnf_tmp_2)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_3 = (True, rbnf_tmp_1_)
lcl_1 = lcl_3
lcl_0 = lcl_1
return lcl_0
def rbnf_named_lr_loop_rbnfmacro_0(rbnf_tmp_0, builtin_state, builtin_tokens):
rbnf_named_lr_rbnfmacro_0_reduce = rbnf_tmp_0
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
lcl_0 = rbnf_named_lr_step_rbnfmacro_0(rbnf_named_lr_rbnfmacro_0_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_0_try = lcl_0
lcl_0 = rbnf_named_lr_rbnfmacro_0_try[0]
lcl_0 = (lcl_0 is not False)
while lcl_0:
lcl_1 = builtin_tokens.offset
rbnf_named__off_0 = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_0_try[1]
rbnf_named_lr_rbnfmacro_0_reduce = lcl_1
lcl_1 = rbnf_named_lr_step_rbnfmacro_0(rbnf_named_lr_rbnfmacro_0_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_0_try = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_0_try[0]
lcl_1 = (lcl_1 is not False)
lcl_0 = lcl_1
lcl_0 = builtin_tokens.offset
lcl_0 = (lcl_0 == rbnf_named__off_0)
if lcl_0:
lcl_1 = (True, rbnf_named_lr_rbnfmacro_0_reduce)
lcl_0 = lcl_1
else:
lcl_0 = rbnf_named_lr_rbnfmacro_0_try
return lcl_0
def rbnf_named_lr_step_rbnfmacro_1(rbnf_tmp_0, builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_atomExpr(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_0
lcl_0 = rbnf_named__check_1[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_1
else:
lcl_1 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_1
lcl_1 = rbnf_tmp_0.append
lcl_1 = lcl_1(rbnf_tmp_1)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_2 = (True, rbnf_tmp_1_)
lcl_0 = lcl_2
return lcl_0
def rbnf_named_lr_loop_rbnfmacro_1(rbnf_tmp_0, builtin_state, builtin_tokens):
rbnf_named_lr_rbnfmacro_1_reduce = rbnf_tmp_0
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
lcl_0 = rbnf_named_lr_step_rbnfmacro_1(rbnf_named_lr_rbnfmacro_1_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_1_try = lcl_0
lcl_0 = rbnf_named_lr_rbnfmacro_1_try[0]
lcl_0 = (lcl_0 is not False)
while lcl_0:
lcl_1 = builtin_tokens.offset
rbnf_named__off_0 = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_1_try[1]
rbnf_named_lr_rbnfmacro_1_reduce = lcl_1
lcl_1 = rbnf_named_lr_step_rbnfmacro_1(rbnf_named_lr_rbnfmacro_1_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_1_try = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_1_try[0]
lcl_1 = (lcl_1 is not False)
lcl_0 = lcl_1
lcl_0 = builtin_tokens.offset
lcl_0 = (lcl_0 == rbnf_named__off_0)
if lcl_0:
lcl_1 = (True, rbnf_named_lr_rbnfmacro_1_reduce)
lcl_0 = lcl_1
else:
lcl_0 = rbnf_named_lr_rbnfmacro_1_try
return lcl_0
def rbnf_named_lr_step_rbnfmacro_2(rbnf_tmp_0, builtin_state, builtin_tokens):
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint is 13):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_0 = _rbnf_cur_token
rbnf_tmp_1 = lcl_0
lcl_0 = (rbnf_tmp_1 is None)
if lcl_0:
lcl_1 = builtin_tokens.offset
lcl_1 = (lcl_1, 'quote | not match')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
else:
lcl_1 = rbnf_named_parse_cseq(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_1
lcl_1 = rbnf_named__check_2[0]
lcl_1 = (lcl_1 == False)
if lcl_1:
lcl_1 = rbnf_named__check_2
else:
lcl_2 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_2
lcl_2 = rbnf_tmp_0.append
lcl_2 = lcl_2(rbnf_tmp_2)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_3 = (True, rbnf_tmp_1_)
lcl_1 = lcl_3
lcl_0 = lcl_1
return lcl_0
def rbnf_named_lr_loop_rbnfmacro_2(rbnf_tmp_0, builtin_state, builtin_tokens):
rbnf_named_lr_rbnfmacro_2_reduce = rbnf_tmp_0
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
lcl_0 = rbnf_named_lr_step_rbnfmacro_2(rbnf_named_lr_rbnfmacro_2_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_2_try = lcl_0
lcl_0 = rbnf_named_lr_rbnfmacro_2_try[0]
lcl_0 = (lcl_0 is not False)
while lcl_0:
lcl_1 = builtin_tokens.offset
rbnf_named__off_0 = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_2_try[1]
rbnf_named_lr_rbnfmacro_2_reduce = lcl_1
lcl_1 = rbnf_named_lr_step_rbnfmacro_2(rbnf_named_lr_rbnfmacro_2_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_2_try = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_2_try[0]
lcl_1 = (lcl_1 is not False)
lcl_0 = lcl_1
lcl_0 = builtin_tokens.offset
lcl_0 = (lcl_0 == rbnf_named__off_0)
if lcl_0:
lcl_1 = (True, rbnf_named_lr_rbnfmacro_2_reduce)
lcl_0 = lcl_1
else:
lcl_0 = rbnf_named_lr_rbnfmacro_2_try
return lcl_0
def rbnf_named_lr_step_rbnfmacro_3(rbnf_tmp_0, builtin_state, builtin_tokens):
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint is 13):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_0 = _rbnf_cur_token
rbnf_tmp_1 = lcl_0
lcl_0 = (rbnf_tmp_1 is None)
if lcl_0:
lcl_1 = builtin_tokens.offset
lcl_1 = (lcl_1, 'quote | not match')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
else:
lcl_1 = rbnf_named_parse_rewrite(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_1
lcl_1 = rbnf_named__check_2[0]
lcl_1 = (lcl_1 == False)
if lcl_1:
lcl_1 = rbnf_named__check_2
else:
lcl_2 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_2
lcl_2 = rbnf_tmp_0.append
lcl_2 = lcl_2(rbnf_tmp_2)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_3 = (True, rbnf_tmp_1_)
lcl_1 = lcl_3
lcl_0 = lcl_1
return lcl_0
def rbnf_named_lr_loop_rbnfmacro_3(rbnf_tmp_0, builtin_state, builtin_tokens):
rbnf_named_lr_rbnfmacro_3_reduce = rbnf_tmp_0
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
lcl_0 = rbnf_named_lr_step_rbnfmacro_3(rbnf_named_lr_rbnfmacro_3_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_3_try = lcl_0
lcl_0 = rbnf_named_lr_rbnfmacro_3_try[0]
lcl_0 = (lcl_0 is not False)
while lcl_0:
lcl_1 = builtin_tokens.offset
rbnf_named__off_0 = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_3_try[1]
rbnf_named_lr_rbnfmacro_3_reduce = lcl_1
lcl_1 = rbnf_named_lr_step_rbnfmacro_3(rbnf_named_lr_rbnfmacro_3_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_3_try = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_3_try[0]
lcl_1 = (lcl_1 is not False)
lcl_0 = lcl_1
lcl_0 = builtin_tokens.offset
lcl_0 = (lcl_0 == rbnf_named__off_0)
if lcl_0:
lcl_1 = (True, rbnf_named_lr_rbnfmacro_3_reduce)
lcl_0 = lcl_1
else:
lcl_0 = rbnf_named_lr_rbnfmacro_3_try
return lcl_0
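# rbnfmacro_4: appends one Ident per ',' separator (token id 0, per the
# 'quote , not match' message). Used by IdentList and the Params pragma.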
def rbnf_named_lr_step_rbnfmacro_4(rbnf_tmp_0, builtin_state, builtin_tokens):
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 0):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_0 = _rbnf_cur_token
rbnf_tmp_1 = lcl_0
lcl_0 = (rbnf_tmp_1 is None)
if lcl_0:
lcl_1 = builtin_tokens.offset
lcl_1 = (lcl_1, 'quote , not match')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
else:
lcl_1 = rbnf_named_parse_Ident(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_1
lcl_1 = rbnf_named__check_2[0]
lcl_1 = (lcl_1 == False)
if lcl_1:
lcl_1 = rbnf_named__check_2
else:
lcl_2 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_2
lcl_2 = rbnf_tmp_0.append
lcl_2 = lcl_2(rbnf_tmp_2)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_3 = (True, rbnf_tmp_1_)
lcl_1 = lcl_3
lcl_0 = lcl_1
return lcl_0
def rbnf_named_lr_loop_rbnfmacro_4(rbnf_tmp_0, builtin_state, builtin_tokens):
rbnf_named_lr_rbnfmacro_4_reduce = rbnf_tmp_0
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
lcl_0 = rbnf_named_lr_step_rbnfmacro_4(rbnf_named_lr_rbnfmacro_4_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_4_try = lcl_0
lcl_0 = rbnf_named_lr_rbnfmacro_4_try[0]
lcl_0 = (lcl_0 is not False)
while lcl_0:
lcl_1 = builtin_tokens.offset
rbnf_named__off_0 = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_4_try[1]
rbnf_named_lr_rbnfmacro_4_reduce = lcl_1
lcl_1 = rbnf_named_lr_step_rbnfmacro_4(rbnf_named_lr_rbnfmacro_4_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_4_try = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_4_try[0]
lcl_1 = (lcl_1 is not False)
lcl_0 = lcl_1
lcl_0 = builtin_tokens.offset
lcl_0 = (lcl_0 == rbnf_named__off_0)
if lcl_0:
lcl_1 = (True, rbnf_named_lr_rbnfmacro_4_reduce)
lcl_0 = lcl_1
else:
lcl_0 = rbnf_named_lr_rbnfmacro_4_try
return lcl_0
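# rbnfmacro_5: appends one lang expression per ';' separator (token id 20).
# Used by rbnf_named_parse_lang_stmts.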
def rbnf_named_lr_step_rbnfmacro_5(rbnf_tmp_0, builtin_state, builtin_tokens):
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 20):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_0 = _rbnf_cur_token
rbnf_tmp_1 = lcl_0
lcl_0 = (rbnf_tmp_1 is None)
if lcl_0:
lcl_1 = builtin_tokens.offset
lcl_1 = (lcl_1, 'quote ; not match')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
else:
lcl_1 = rbnf_named_parse_lang(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_1
lcl_1 = rbnf_named__check_2[0]
lcl_1 = (lcl_1 == False)
if lcl_1:
lcl_1 = rbnf_named__check_2
else:
lcl_2 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_2
lcl_2 = rbnf_tmp_0.append
lcl_2 = lcl_2(rbnf_tmp_2)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_3 = (True, rbnf_tmp_1_)
lcl_1 = lcl_3
lcl_0 = lcl_1
return lcl_0
def rbnf_named_lr_loop_rbnfmacro_5(rbnf_tmp_0, builtin_state, builtin_tokens):
rbnf_named_lr_rbnfmacro_5_reduce = rbnf_tmp_0
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
lcl_0 = rbnf_named_lr_step_rbnfmacro_5(rbnf_named_lr_rbnfmacro_5_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_5_try = lcl_0
lcl_0 = rbnf_named_lr_rbnfmacro_5_try[0]
lcl_0 = (lcl_0 is not False)
while lcl_0:
lcl_1 = builtin_tokens.offset
rbnf_named__off_0 = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_5_try[1]
rbnf_named_lr_rbnfmacro_5_reduce = lcl_1
lcl_1 = rbnf_named_lr_step_rbnfmacro_5(rbnf_named_lr_rbnfmacro_5_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_5_try = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_5_try[0]
lcl_1 = (lcl_1 is not False)
lcl_0 = lcl_1
lcl_0 = builtin_tokens.offset
lcl_0 = (lcl_0 == rbnf_named__off_0)
if lcl_0:
lcl_1 = (True, rbnf_named_lr_rbnfmacro_5_reduce)
lcl_0 = lcl_1
else:
lcl_0 = rbnf_named_lr_rbnfmacro_5_try
return lcl_0
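# rbnfmacro_6: appends one lang expression per ',' separator (token id 0).
# Used by rbnf_named_parse_lang_lst.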
def rbnf_named_lr_step_rbnfmacro_6(rbnf_tmp_0, builtin_state, builtin_tokens):
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 0):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_0 = _rbnf_cur_token
rbnf_tmp_1 = lcl_0
lcl_0 = (rbnf_tmp_1 is None)
if lcl_0:
lcl_1 = builtin_tokens.offset
lcl_1 = (lcl_1, 'quote , not match')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
else:
lcl_1 = rbnf_named_parse_lang(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_1
lcl_1 = rbnf_named__check_2[0]
lcl_1 = (lcl_1 == False)
if lcl_1:
lcl_1 = rbnf_named__check_2
else:
lcl_2 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_2
lcl_2 = rbnf_tmp_0.append
lcl_2 = lcl_2(rbnf_tmp_2)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_3 = (True, rbnf_tmp_1_)
lcl_1 = lcl_3
lcl_0 = lcl_1
return lcl_0
def rbnf_named_lr_loop_rbnfmacro_6(rbnf_tmp_0, builtin_state, builtin_tokens):
rbnf_named_lr_rbnfmacro_6_reduce = rbnf_tmp_0
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
lcl_0 = rbnf_named_lr_step_rbnfmacro_6(rbnf_named_lr_rbnfmacro_6_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_6_try = lcl_0
lcl_0 = rbnf_named_lr_rbnfmacro_6_try[0]
lcl_0 = (lcl_0 is not False)
while lcl_0:
lcl_1 = builtin_tokens.offset
rbnf_named__off_0 = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_6_try[1]
rbnf_named_lr_rbnfmacro_6_reduce = lcl_1
lcl_1 = rbnf_named_lr_step_rbnfmacro_6(rbnf_named_lr_rbnfmacro_6_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_6_try = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_6_try[0]
lcl_1 = (lcl_1 is not False)
lcl_0 = lcl_1
lcl_0 = builtin_tokens.offset
lcl_0 = (lcl_0 == rbnf_named__off_0)
if lcl_0:
lcl_1 = (True, rbnf_named_lr_rbnfmacro_6_reduce)
lcl_0 = lcl_1
else:
lcl_0 = rbnf_named_lr_rbnfmacro_6_try
return lcl_0
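# rbnfmacro_7: appends one filename (quoted string) per ',' separator
# (token id 0). Used by the include pragma.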
def rbnf_named_lr_step_rbnfmacro_7(rbnf_tmp_0, builtin_state, builtin_tokens):
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 0):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_0 = _rbnf_cur_token
rbnf_tmp_1 = lcl_0
lcl_0 = (rbnf_tmp_1 is None)
if lcl_0:
lcl_1 = builtin_tokens.offset
lcl_1 = (lcl_1, 'quote , not match')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
else:
lcl_1 = rbnf_named_parse_filename(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_1
lcl_1 = rbnf_named__check_2[0]
lcl_1 = (lcl_1 == False)
if lcl_1:
lcl_1 = rbnf_named__check_2
else:
lcl_2 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_2
lcl_2 = rbnf_tmp_0.append
lcl_2 = lcl_2(rbnf_tmp_2)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_3 = (True, rbnf_tmp_1_)
lcl_1 = lcl_3
lcl_0 = lcl_1
return lcl_0
def rbnf_named_lr_loop_rbnfmacro_7(rbnf_tmp_0, builtin_state, builtin_tokens):
rbnf_named_lr_rbnfmacro_7_reduce = rbnf_tmp_0
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
lcl_0 = rbnf_named_lr_step_rbnfmacro_7(rbnf_named_lr_rbnfmacro_7_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_7_try = lcl_0
lcl_0 = rbnf_named_lr_rbnfmacro_7_try[0]
lcl_0 = (lcl_0 is not False)
while lcl_0:
lcl_1 = builtin_tokens.offset
rbnf_named__off_0 = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_7_try[1]
rbnf_named_lr_rbnfmacro_7_reduce = lcl_1
lcl_1 = rbnf_named_lr_step_rbnfmacro_7(rbnf_named_lr_rbnfmacro_7_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_7_try = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_7_try[0]
lcl_1 = (lcl_1 is not False)
lcl_0 = lcl_1
lcl_0 = builtin_tokens.offset
lcl_0 = (lcl_0 == rbnf_named__off_0)
if lcl_0:
lcl_1 = (True, rbnf_named_lr_rbnfmacro_7_reduce)
lcl_0 = lcl_1
else:
lcl_0 = rbnf_named_lr_rbnfmacro_7_try
return lcl_0
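# rbnfmacro_8: unlike the pairs above, this step has no separator; it
# dispatches on one or two tokens of lookahead, appending either a pragma
# (token ids 24/25/26, or an Ident followed by id 26) or a prod (an Ident
# followed by anything else). parse_START consumes this statement sequence.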
def rbnf_named_lr_step_rbnfmacro_8(rbnf_tmp_0, builtin_state, builtin_tokens):
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_0 = _rbnf_peek_tmp
if lcl_0:
lcl_2 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_2 = lcl_2.idint
if (lcl_2 == 25):
lcl_3 = rbnf_named_parse_pragma(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_3
lcl_3 = rbnf_named__check_1[0]
lcl_3 = (lcl_3 == False)
if lcl_3:
lcl_3 = rbnf_named__check_1
else:
lcl_4 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_4
lcl_4 = rbnf_tmp_0.append
lcl_4 = lcl_4(rbnf_tmp_1)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_5 = (True, rbnf_tmp_1_)
lcl_3 = lcl_5
lcl_1 = lcl_3
elif (lcl_2 == 24):
lcl_3 = rbnf_named_parse_pragma(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_3
lcl_3 = rbnf_named__check_1[0]
lcl_3 = (lcl_3 == False)
if lcl_3:
lcl_3 = rbnf_named__check_1
else:
lcl_5 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_5
lcl_5 = rbnf_tmp_0.append
lcl_5 = lcl_5(rbnf_tmp_1)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_6 = (True, rbnf_tmp_1_)
lcl_3 = lcl_6
lcl_1 = lcl_3
elif (lcl_2 == 1):
lcl_3 = builtin_tokens.offset
rbnf_named__off_1 = lcl_3
try:
builtin_tokens.array[(builtin_tokens.offset + 1)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_3 = _rbnf_peek_tmp
if lcl_3:
lcl_7 = builtin_tokens.array[(builtin_tokens.offset + 1)]
lcl_7 = lcl_7.idint
if (lcl_7 == 7):
lcl_8 = rbnf_named_parse_prod(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_8
lcl_8 = rbnf_named__check_1[0]
lcl_8 = (lcl_8 == False)
if lcl_8:
lcl_8 = rbnf_named__check_1
else:
lcl_9 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_9
lcl_9 = rbnf_tmp_0.append
lcl_9 = lcl_9(rbnf_tmp_1)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_10 = (True, rbnf_tmp_1_)
lcl_8 = lcl_10
lcl_6 = lcl_8
elif (lcl_7 == 19):
lcl_10 = rbnf_named_parse_prod(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_8 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_8
lcl_8 = rbnf_tmp_0.append
lcl_8 = lcl_8(rbnf_tmp_1)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_11 = (True, rbnf_tmp_1_)
lcl_10 = lcl_11
lcl_6 = lcl_10
elif (lcl_7 == 17):
lcl_10 = rbnf_named_parse_prod(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_11 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_11
lcl_11 = rbnf_tmp_0.append
lcl_11 = lcl_11(rbnf_tmp_1)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_12 = (True, rbnf_tmp_1_)
lcl_10 = lcl_12
lcl_6 = lcl_10
elif (lcl_7 == 18):
lcl_10 = rbnf_named_parse_prod(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_12 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_12
lcl_12 = rbnf_tmp_0.append
lcl_12 = lcl_12(rbnf_tmp_1)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_13 = (True, rbnf_tmp_1_)
lcl_10 = lcl_13
lcl_6 = lcl_10
elif (lcl_7 == 16):
lcl_10 = rbnf_named_parse_prod(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_13 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_13
lcl_13 = rbnf_tmp_0.append
lcl_13 = lcl_13(rbnf_tmp_1)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_14 = (True, rbnf_tmp_1_)
lcl_10 = lcl_14
lcl_6 = lcl_10
elif (lcl_7 == 26):
lcl_10 = rbnf_named_parse_pragma(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_14 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_14
lcl_14 = rbnf_tmp_0.append
lcl_14 = lcl_14(rbnf_tmp_1)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_15 = (True, rbnf_tmp_1_)
lcl_10 = lcl_15
lcl_6 = lcl_10
else:
lcl_10 = rbnf_named_parse_prod(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_15 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_15
lcl_15 = rbnf_tmp_0.append
lcl_15 = lcl_15(rbnf_tmp_1)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_16 = (True, rbnf_tmp_1_)
lcl_10 = lcl_16
lcl_6 = lcl_10
lcl_3 = lcl_6
else:
lcl_10 = (rbnf_named__off_1, 'rbnfmacro_8 got EOF')
lcl_10 = builtin_cons(lcl_10, builtin_nil)
lcl_10 = (False, lcl_10)
lcl_3 = lcl_10
lcl_1 = lcl_3
elif (lcl_2 == 26):
lcl_10 = rbnf_named_parse_pragma(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_16 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_16
lcl_16 = rbnf_tmp_0.append
lcl_16 = lcl_16(rbnf_tmp_1)
rbnf_tmp_1_ = rbnf_tmp_0
lcl_3 = (True, rbnf_tmp_1_)
lcl_10 = lcl_3
lcl_1 = lcl_10
else:
lcl_10 = (rbnf_named__off_0, 'rbnfmacro_8 lookahead failed')
lcl_10 = builtin_cons(lcl_10, builtin_nil)
lcl_10 = (False, lcl_10)
lcl_1 = lcl_10
lcl_0 = lcl_1
else:
lcl_1 = (rbnf_named__off_0, 'rbnfmacro_8 got EOF')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
return lcl_0
def rbnf_named_lr_loop_rbnfmacro_8(rbnf_tmp_0, builtin_state, builtin_tokens):
rbnf_named_lr_rbnfmacro_8_reduce = rbnf_tmp_0
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
lcl_0 = rbnf_named_lr_step_rbnfmacro_8(rbnf_named_lr_rbnfmacro_8_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_8_try = lcl_0
lcl_0 = rbnf_named_lr_rbnfmacro_8_try[0]
lcl_0 = (lcl_0 is not False)
while lcl_0:
lcl_1 = builtin_tokens.offset
rbnf_named__off_0 = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_8_try[1]
rbnf_named_lr_rbnfmacro_8_reduce = lcl_1
lcl_1 = rbnf_named_lr_step_rbnfmacro_8(rbnf_named_lr_rbnfmacro_8_reduce, builtin_state, builtin_tokens)
rbnf_named_lr_rbnfmacro_8_try = lcl_1
lcl_1 = rbnf_named_lr_rbnfmacro_8_try[0]
lcl_1 = (lcl_1 is not False)
lcl_0 = lcl_1
lcl_0 = builtin_tokens.offset
lcl_0 = (lcl_0 == rbnf_named__off_0)
if lcl_0:
lcl_1 = (True, rbnf_named_lr_rbnfmacro_8_reduce)
lcl_0 = lcl_1
else:
lcl_0 = rbnf_named_lr_rbnfmacro_8_try
return lcl_0
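# Ident: matches a single identifier token (id 1) and yields its string value.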
def rbnf_named_parse_Ident(builtin_state, builtin_tokens):
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 1):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_0 = _rbnf_cur_token
rbnf_tmp_0 = lcl_0
lcl_0 = (rbnf_tmp_0 is None)
if lcl_0:
lcl_1 = builtin_tokens.offset
lcl_1 = (lcl_1, 'Ident not match')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
else:
lcl_1 = rbnf_tmp_0.value
rbnf_tmp_1_ = lcl_1
lcl_1 = (True, rbnf_tmp_1_)
lcl_0 = lcl_1
return lcl_0
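# IdentList: a ','-separated list of Idents, via rbnfmacro_4.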
def rbnf_named_parse_IdentList(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_rbnfmacro_4(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
rbnf_tmp_1_ = rbnf_tmp_0
lcl_1 = (True, rbnf_tmp_1_)
lcl_0 = lcl_1
return lcl_0
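# START: a BOF token (id 27), then the statement sequence of rbnfmacro_8,
# then an EOF token (id 28); yields the collected statements.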
def rbnf_named_parse_START(builtin_state, builtin_tokens):
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 27):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_0 = _rbnf_cur_token
rbnf_tmp_0 = lcl_0
lcl_0 = (rbnf_tmp_0 is None)
if lcl_0:
lcl_1 = builtin_tokens.offset
lcl_1 = (lcl_1, 'BOF not match')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
else:
lcl_1 = rbnf_named_parse_rbnfmacro_8(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_1
lcl_1 = rbnf_named__check_1[0]
lcl_1 = (lcl_1 == False)
if lcl_1:
lcl_1 = rbnf_named__check_1
else:
lcl_2 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_2
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 28):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_2 = _rbnf_cur_token
rbnf_tmp_2 = lcl_2
lcl_2 = (rbnf_tmp_2 is None)
if lcl_2:
lcl_3 = builtin_tokens.offset
lcl_3 = (lcl_3, 'EOF not match')
lcl_3 = builtin_cons(lcl_3, builtin_nil)
lcl_3 = (False, lcl_3)
lcl_2 = lcl_3
else:
rbnf_tmp_1_ = rbnf_tmp_1
lcl_3 = (True, rbnf_tmp_1_)
lcl_2 = lcl_3
lcl_1 = lcl_2
lcl_0 = lcl_1
return lcl_0
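# alts: a '|'-separated list of rewrites, via rbnfmacro_3.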
def rbnf_named_parse_alts(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_rbnfmacro_3(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
rbnf_tmp_1_ = rbnf_tmp_0
lcl_1 = (True, rbnf_tmp_1_)
lcl_0 = lcl_1
return lcl_0
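# atom: one token of lookahead selects the alternative. The error messages
# indicate '<' Ident '>' (ids 3/4) builds Terminal(SYMBOL, ...),
# '(' expr ')' (ids 5/6) is grouping, a quoted string (id 2) becomes
# Terminal(LITERAL, unesc(...)), and a bare Ident becomes a MacroUse
# (Ident '[' expr_lst ']', ids 7/8), a Bind (Ident, token id 9, then
# atomExpr), or a plain NonTerminal.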
def rbnf_named_parse_atom(builtin_state, builtin_tokens):
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_0 = _rbnf_peek_tmp
if lcl_0:
lcl_2 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_2 = lcl_2.idint
if (lcl_2 == 3):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_3 = _rbnf_cur_token
rbnf_tmp_0 = lcl_3
lcl_3 = rbnf_named_parse_Ident(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_3
lcl_3 = rbnf_named__check_1[0]
lcl_3 = (lcl_3 == False)
if lcl_3:
lcl_3 = rbnf_named__check_1
else:
lcl_4 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_4
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 4):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_4 = _rbnf_cur_token
rbnf_tmp_2 = lcl_4
lcl_4 = (rbnf_tmp_2 is None)
if lcl_4:
lcl_5 = builtin_tokens.offset
lcl_5 = (lcl_5, 'quote > not match')
lcl_5 = builtin_cons(lcl_5, builtin_nil)
lcl_5 = (False, lcl_5)
lcl_4 = lcl_5
else:
lcl_5 = Terminal(SYMBOL, rbnf_tmp_1)
rbnf_tmp_1_ = lcl_5
lcl_5 = (True, rbnf_tmp_1_)
lcl_4 = lcl_5
lcl_3 = lcl_4
lcl_1 = lcl_3
elif (lcl_2 == 5):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_3 = _rbnf_cur_token
rbnf_tmp_0 = lcl_3
lcl_3 = rbnf_named_parse_expr(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_3
lcl_3 = rbnf_named__check_1[0]
lcl_3 = (lcl_3 == False)
if lcl_3:
lcl_3 = rbnf_named__check_1
else:
lcl_4 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_4
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 6):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_4 = _rbnf_cur_token
rbnf_tmp_2 = lcl_4
lcl_4 = (rbnf_tmp_2 is None)
if lcl_4:
lcl_5 = builtin_tokens.offset
lcl_5 = (lcl_5, 'quote ) not match')
lcl_5 = builtin_cons(lcl_5, builtin_nil)
lcl_5 = (False, lcl_5)
lcl_4 = lcl_5
else:
rbnf_tmp_1_ = rbnf_tmp_1
lcl_5 = (True, rbnf_tmp_1_)
lcl_4 = lcl_5
lcl_3 = lcl_4
lcl_1 = lcl_3
elif (lcl_2 == 2):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_3 = _rbnf_cur_token
rbnf_tmp_0 = lcl_3
lcl_3 = rbnf_tmp_0.value
lcl_3 = unesc(lcl_3)
lcl_3 = Terminal(LITERAL, lcl_3)
rbnf_tmp_1_ = lcl_3
lcl_3 = (True, rbnf_tmp_1_)
lcl_1 = lcl_3
elif (lcl_2 == 1):
lcl_3 = rbnf_named_parse_Ident(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_3
lcl_3 = rbnf_named__check_0[0]
lcl_3 = (lcl_3 == False)
if lcl_3:
lcl_3 = rbnf_named__check_0
else:
lcl_4 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_4
lcl_4 = builtin_tokens.offset
rbnf_named__off_1 = lcl_4
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_4 = _rbnf_peek_tmp
if lcl_4:
lcl_6 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_6 = lcl_6.idint
if (lcl_6 == 7):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_7 = _rbnf_cur_token
rbnf_tmp_1 = lcl_7
lcl_7 = rbnf_named_parse_expr_lst(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_7
lcl_7 = rbnf_named__check_2[0]
lcl_7 = (lcl_7 == False)
if lcl_7:
lcl_7 = rbnf_named__check_2
else:
lcl_8 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_8
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 8):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_8 = _rbnf_cur_token
rbnf_tmp_3 = lcl_8
lcl_8 = (rbnf_tmp_3 is None)
if lcl_8:
lcl_9 = builtin_tokens.offset
lcl_9 = (lcl_9, 'quote ] not match')
lcl_9 = builtin_cons(lcl_9, builtin_nil)
lcl_9 = (False, lcl_9)
lcl_8 = lcl_9
else:
lcl_9 = MacroUse(rbnf_tmp_0, rbnf_tmp_2)
rbnf_tmp_1_ = lcl_9
lcl_9 = (True, rbnf_tmp_1_)
lcl_8 = lcl_9
lcl_7 = lcl_8
lcl_5 = lcl_7
elif (lcl_6 == 9):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_7 = _rbnf_cur_token
rbnf_tmp_1 = lcl_7
lcl_7 = rbnf_named_parse_atomExpr(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_7
lcl_7 = rbnf_named__check_2[0]
lcl_7 = (lcl_7 == False)
if lcl_7:
lcl_7 = rbnf_named__check_2
else:
lcl_8 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_8
lcl_8 = Bind(rbnf_tmp_0, rbnf_tmp_2)
rbnf_tmp_1_ = lcl_8
lcl_8 = (True, rbnf_tmp_1_)
lcl_7 = lcl_8
lcl_5 = lcl_7
else:
lcl_7 = NonTerminal(rbnf_tmp_0)
rbnf_tmp_1_ = lcl_7
lcl_7 = (True, rbnf_tmp_1_)
lcl_5 = lcl_7
lcl_4 = lcl_5
else:
lcl_5 = (rbnf_named__off_1, 'atom got EOF')
lcl_5 = builtin_cons(lcl_5, builtin_nil)
lcl_5 = (False, lcl_5)
lcl_4 = lcl_5
lcl_3 = lcl_4
lcl_1 = lcl_3
else:
lcl_3 = (rbnf_named__off_0, 'atom lookahead failed')
lcl_3 = builtin_cons(lcl_3, builtin_nil)
lcl_3 = (False, lcl_3)
lcl_1 = lcl_3
lcl_0 = lcl_1
else:
lcl_1 = (rbnf_named__off_0, 'atom got EOF')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
return lcl_0
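# atomExpr: an atom with one optional postfix marker; token ids 10/11/12 are
# plausibly '?', '*' and '+' ('*' is encoded as Optional(ListUse(...)),
# i.e. an optional one-or-more).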
def rbnf_named_parse_atomExpr(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_atom(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
lcl_1 = builtin_tokens.offset
rbnf_named__off_0 = lcl_1
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_1 = _rbnf_peek_tmp
if lcl_1:
lcl_3 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_3 = lcl_3.idint
if (lcl_3 == 10):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_4 = _rbnf_cur_token
rbnf_tmp_1 = lcl_4
lcl_4 = Optional(rbnf_tmp_0)
rbnf_tmp_1_ = lcl_4
lcl_4 = (True, rbnf_tmp_1_)
lcl_2 = lcl_4
elif (lcl_3 == 12):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_4 = _rbnf_cur_token
rbnf_tmp_1 = lcl_4
lcl_4 = ListUse(rbnf_tmp_0)
rbnf_tmp_1_ = lcl_4
lcl_4 = (True, rbnf_tmp_1_)
lcl_2 = lcl_4
elif (lcl_3 == 11):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_4 = _rbnf_cur_token
rbnf_tmp_1 = lcl_4
lcl_4 = ListUse(rbnf_tmp_0)
lcl_4 = Optional(lcl_4)
rbnf_tmp_1_ = lcl_4
lcl_4 = (True, rbnf_tmp_1_)
lcl_2 = lcl_4
else:
rbnf_tmp_1_ = rbnf_tmp_0
lcl_4 = (True, rbnf_tmp_1_)
lcl_2 = lcl_4
lcl_1 = lcl_2
else:
lcl_2 = (rbnf_named__off_0, 'atomExpr got EOF')
lcl_2 = builtin_cons(lcl_2, builtin_nil)
lcl_2 = (False, lcl_2)
lcl_1 = lcl_2
lcl_0 = lcl_1
return lcl_0
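# cseq: parses a seq and wraps the result with seq(...).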
def rbnf_named_parse_cseq(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_seq(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
lcl_1 = seq(rbnf_tmp_0)
rbnf_tmp_1_ = lcl_1
lcl_1 = (True, rbnf_tmp_1_)
lcl_0 = lcl_1
return lcl_0
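# def: dispatches on the defining token; id 19 introduces a single expr,
# while ids 16/17/18 each introduce an alts body. The surface spellings of
# these operators are not recoverable from the generated code alone.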
def rbnf_named_parse_def(builtin_state, builtin_tokens):
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_0 = _rbnf_peek_tmp
if lcl_0:
lcl_2 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_2 = lcl_2.idint
if (lcl_2 == 19):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_3 = _rbnf_cur_token
rbnf_tmp_0 = lcl_3
lcl_3 = rbnf_named_parse_expr(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_3
lcl_3 = rbnf_named__check_1[0]
lcl_3 = (lcl_3 == False)
if lcl_3:
lcl_3 = rbnf_named__check_1
else:
lcl_4 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_4
rbnf_tmp_1_ = rbnf_tmp_1
lcl_4 = (True, rbnf_tmp_1_)
lcl_3 = lcl_4
lcl_1 = lcl_3
elif (lcl_2 == 17):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_3 = _rbnf_cur_token
rbnf_tmp_0 = lcl_3
lcl_3 = rbnf_named_parse_alts(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_3
lcl_3 = rbnf_named__check_1[0]
lcl_3 = (lcl_3 == False)
if lcl_3:
lcl_3 = rbnf_named__check_1
else:
lcl_4 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_4
rbnf_tmp_1_ = rbnf_tmp_1
lcl_4 = (True, rbnf_tmp_1_)
lcl_3 = lcl_4
lcl_1 = lcl_3
elif (lcl_2 == 18):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_3 = _rbnf_cur_token
rbnf_tmp_0 = lcl_3
lcl_3 = rbnf_named_parse_alts(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_3
lcl_3 = rbnf_named__check_1[0]
lcl_3 = (lcl_3 == False)
if lcl_3:
lcl_3 = rbnf_named__check_1
else:
lcl_4 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_4
rbnf_tmp_1_ = rbnf_tmp_1
lcl_4 = (True, rbnf_tmp_1_)
lcl_3 = lcl_4
lcl_1 = lcl_3
elif (lcl_2 == 16):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_3 = _rbnf_cur_token
rbnf_tmp_0 = lcl_3
lcl_3 = rbnf_named_parse_alts(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_3
lcl_3 = rbnf_named__check_1[0]
lcl_3 = (lcl_3 == False)
if lcl_3:
lcl_3 = rbnf_named__check_1
else:
lcl_4 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_4
rbnf_tmp_1_ = rbnf_tmp_1
lcl_4 = (True, rbnf_tmp_1_)
lcl_3 = lcl_4
lcl_1 = lcl_3
else:
lcl_3 = (rbnf_named__off_0, 'def lookahead failed')
lcl_3 = builtin_cons(lcl_3, builtin_nil)
lcl_3 = (False, lcl_3)
lcl_1 = lcl_3
lcl_0 = lcl_1
else:
lcl_1 = (rbnf_named__off_0, 'def got EOF')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
return lcl_0
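# expr: a '|'-separated list of cseqs (rbnfmacro_2) wrapped by alt(...).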
def rbnf_named_parse_expr(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_rbnfmacro_2(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
lcl_1 = alt(rbnf_tmp_0)
rbnf_tmp_1_ = lcl_1
lcl_1 = (True, rbnf_tmp_1_)
lcl_0 = lcl_1
return lcl_0
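# expr_lst: delegates to rbnf_named_parse_rbnfmacro_0 (defined elsewhere in
# this file).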
def rbnf_named_parse_expr_lst(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_rbnfmacro_0(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
rbnf_tmp_1_ = rbnf_tmp_0
lcl_1 = (True, rbnf_tmp_1_)
lcl_0 = lcl_1
return lcl_0
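# filename: a quoted string token (id 2), unescaped via unesc(...).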
def rbnf_named_parse_filename(builtin_state, builtin_tokens):
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 2):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_0 = _rbnf_cur_token
rbnf_tmp_0 = lcl_0
lcl_0 = (rbnf_tmp_0 is None)
if lcl_0:
lcl_1 = builtin_tokens.offset
lcl_1 = (lcl_1, 'QuotedStr not match')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
else:
lcl_1 = rbnf_tmp_0.value
lcl_1 = unesc(lcl_1)
rbnf_tmp_1_ = lcl_1
lcl_1 = (True, rbnf_tmp_1_)
lcl_0 = lcl_1
return lcl_0
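# lang: a lang_atom followed by the left-recursive continuation
# rbnf_named_lr_loop_lang (defined elsewhere in this file).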
def rbnf_named_parse_lang(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_lang_atom(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
rbnf_tmp_1_ = rbnf_tmp_0
lcl_1 = rbnf_named_lr_loop_lang(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_0 = lcl_1
return lcl_0
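# lang_atom: the literal forms of the action language, by lookahead:
#   id 7 '[' builds a List (empty, or from a lang_lst closed by id 8 ']');
#   id 5 '(' builds a Tuple (trailing ','), a maybeTuple, or the empty '()';
#   id 22 followed by an Int token (id 21) builds a Slot;
#   a bare Int builds Int(...); a bare Ident builds Var(...).
# Every successful branch re-enters rbnf_named_lr_loop_lang_atom to pick up
# postfix forms.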
def rbnf_named_parse_lang_atom(builtin_state, builtin_tokens):
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_0 = _rbnf_peek_tmp
if lcl_0:
lcl_2 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_2 = lcl_2.idint
if (lcl_2 == 7):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_3 = _rbnf_cur_token
rbnf_tmp_0 = lcl_3
lcl_3 = builtin_tokens.offset
rbnf_named__off_1 = lcl_3
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_3 = _rbnf_peek_tmp
if lcl_3:
lcl_5 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_5 = lcl_5.idint
if (lcl_5 == 8):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_6 = _rbnf_cur_token
rbnf_tmp_1 = lcl_6
lcl_6 = []
lcl_6 = List(lcl_6)
rbnf_tmp_1_ = lcl_6
lcl_6 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_4 = lcl_6
elif (lcl_5 == 7):
lcl_6 = rbnf_named_parse_lang_lst(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_6
lcl_6 = rbnf_named__check_1[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_1
else:
lcl_7 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_7
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 8):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_7 = _rbnf_cur_token
rbnf_tmp_2 = lcl_7
lcl_7 = (rbnf_tmp_2 is None)
if lcl_7:
lcl_8 = builtin_tokens.offset
lcl_8 = (lcl_8, 'quote ] not match')
lcl_8 = builtin_cons(lcl_8, builtin_nil)
lcl_8 = (False, lcl_8)
lcl_7 = lcl_8
else:
lcl_8 = List(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_8
lcl_8 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_7 = lcl_8
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 5):
lcl_6 = rbnf_named_parse_lang_lst(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_6
lcl_6 = rbnf_named__check_1[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_1
else:
lcl_7 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_7
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 8):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_7 = _rbnf_cur_token
rbnf_tmp_2 = lcl_7
lcl_7 = (rbnf_tmp_2 is None)
if lcl_7:
lcl_8 = builtin_tokens.offset
lcl_8 = (lcl_8, 'quote ] not match')
lcl_8 = builtin_cons(lcl_8, builtin_nil)
lcl_8 = (False, lcl_8)
lcl_7 = lcl_8
else:
lcl_8 = List(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_8
lcl_8 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_7 = lcl_8
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 22):
lcl_6 = rbnf_named_parse_lang_lst(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_6
lcl_6 = rbnf_named__check_1[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_1
else:
lcl_7 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_7
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 8):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_7 = _rbnf_cur_token
rbnf_tmp_2 = lcl_7
lcl_7 = (rbnf_tmp_2 is None)
if lcl_7:
lcl_8 = builtin_tokens.offset
lcl_8 = (lcl_8, 'quote ] not match')
lcl_8 = builtin_cons(lcl_8, builtin_nil)
lcl_8 = (False, lcl_8)
lcl_7 = lcl_8
else:
lcl_8 = List(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_8
lcl_8 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_7 = lcl_8
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 21):
lcl_6 = rbnf_named_parse_lang_lst(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_6
lcl_6 = rbnf_named__check_1[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_1
else:
lcl_7 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_7
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 8):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_7 = _rbnf_cur_token
rbnf_tmp_2 = lcl_7
lcl_7 = (rbnf_tmp_2 is None)
if lcl_7:
lcl_8 = builtin_tokens.offset
lcl_8 = (lcl_8, 'quote ] not match')
lcl_8 = builtin_cons(lcl_8, builtin_nil)
lcl_8 = (False, lcl_8)
lcl_7 = lcl_8
else:
lcl_8 = List(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_8
lcl_8 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_7 = lcl_8
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 1):
lcl_6 = rbnf_named_parse_lang_lst(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_6
lcl_6 = rbnf_named__check_1[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_1
else:
lcl_7 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_7
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 8):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_7 = _rbnf_cur_token
rbnf_tmp_2 = lcl_7
lcl_7 = (rbnf_tmp_2 is None)
if lcl_7:
lcl_8 = builtin_tokens.offset
lcl_8 = (lcl_8, 'quote ] not match')
lcl_8 = builtin_cons(lcl_8, builtin_nil)
lcl_8 = (False, lcl_8)
lcl_7 = lcl_8
else:
lcl_8 = List(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_8
lcl_8 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_7 = lcl_8
lcl_6 = lcl_7
lcl_4 = lcl_6
else:
lcl_6 = (rbnf_named__off_1, 'lang_atom lookahead failed')
lcl_6 = builtin_cons(lcl_6, builtin_nil)
lcl_6 = (False, lcl_6)
lcl_4 = lcl_6
lcl_3 = lcl_4
else:
lcl_4 = (rbnf_named__off_1, 'lang_atom got EOF')
lcl_4 = builtin_cons(lcl_4, builtin_nil)
lcl_4 = (False, lcl_4)
lcl_3 = lcl_4
lcl_1 = lcl_3
elif (lcl_2 == 5):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_3 = _rbnf_cur_token
rbnf_tmp_0 = lcl_3
lcl_3 = builtin_tokens.offset
rbnf_named__off_1 = lcl_3
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_3 = _rbnf_peek_tmp
if lcl_3:
lcl_5 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_5 = lcl_5.idint
if (lcl_5 == 7):
lcl_6 = rbnf_named_parse_lang_lst(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_6
lcl_6 = rbnf_named__check_1[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_1
else:
lcl_7 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_7
lcl_7 = builtin_tokens.offset
rbnf_named__off_2 = lcl_7
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_7 = _rbnf_peek_tmp
if lcl_7:
lcl_9 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_9 = lcl_9.idint
if (lcl_9 == 0):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_10 = _rbnf_cur_token
rbnf_tmp_2 = lcl_10
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 6):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_10 = _rbnf_cur_token
rbnf_tmp_3 = lcl_10
lcl_10 = (rbnf_tmp_3 is None)
if lcl_10:
lcl_11 = builtin_tokens.offset
lcl_11 = (lcl_11, 'quote ) not match')
lcl_11 = builtin_cons(lcl_11, builtin_nil)
lcl_11 = (False, lcl_11)
lcl_10 = lcl_11
else:
lcl_11 = Tuple(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_11
lcl_11 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_10 = lcl_11
lcl_8 = lcl_10
elif (lcl_9 == 6):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_10 = _rbnf_cur_token
rbnf_tmp_2 = lcl_10
lcl_10 = maybeTuple(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_10
lcl_10 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_8 = lcl_10
else:
lcl_10 = (rbnf_named__off_2, 'lang_atom lookahead failed')
lcl_10 = builtin_cons(lcl_10, builtin_nil)
lcl_10 = (False, lcl_10)
lcl_8 = lcl_10
lcl_7 = lcl_8
else:
lcl_10 = (rbnf_named__off_2, 'lang_atom got EOF')
lcl_10 = builtin_cons(lcl_10, builtin_nil)
lcl_10 = (False, lcl_10)
lcl_7 = lcl_10
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 6):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_10 = _rbnf_cur_token
rbnf_tmp_1 = lcl_10
lcl_10 = []
lcl_10 = Tuple(lcl_10)
rbnf_tmp_1_ = lcl_10
lcl_10 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_4 = lcl_10
elif (lcl_5 == 5):
lcl_10 = rbnf_named_parse_lang_lst(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_11 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_11
lcl_11 = builtin_tokens.offset
rbnf_named__off_2 = lcl_11
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_11 = _rbnf_peek_tmp
if lcl_11:
lcl_7 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_7 = lcl_7.idint
if (lcl_7 == 0):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_8 = _rbnf_cur_token
rbnf_tmp_2 = lcl_8
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 6):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_8 = _rbnf_cur_token
rbnf_tmp_3 = lcl_8
lcl_8 = (rbnf_tmp_3 is None)
if lcl_8:
lcl_9 = builtin_tokens.offset
lcl_9 = (lcl_9, 'quote ) not match')
lcl_9 = builtin_cons(lcl_9, builtin_nil)
lcl_9 = (False, lcl_9)
lcl_8 = lcl_9
else:
lcl_9 = Tuple(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_9
lcl_9 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_8 = lcl_9
lcl_6 = lcl_8
elif (lcl_7 == 6):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_8 = _rbnf_cur_token
rbnf_tmp_2 = lcl_8
lcl_8 = maybeTuple(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_8
lcl_8 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_6 = lcl_8
else:
lcl_8 = (rbnf_named__off_2, 'lang_atom lookahead failed')
lcl_8 = builtin_cons(lcl_8, builtin_nil)
lcl_8 = (False, lcl_8)
lcl_6 = lcl_8
lcl_11 = lcl_6
else:
lcl_6 = (rbnf_named__off_2, 'lang_atom got EOF')
lcl_6 = builtin_cons(lcl_6, builtin_nil)
lcl_6 = (False, lcl_6)
lcl_11 = lcl_6
lcl_10 = lcl_11
lcl_4 = lcl_10
elif (lcl_5 == 22):
lcl_10 = rbnf_named_parse_lang_lst(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_11 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_11
lcl_11 = builtin_tokens.offset
rbnf_named__off_2 = lcl_11
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_11 = _rbnf_peek_tmp
if lcl_11:
lcl_7 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_7 = lcl_7.idint
if (lcl_7 == 0):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_8 = _rbnf_cur_token
rbnf_tmp_2 = lcl_8
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 6):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_8 = _rbnf_cur_token
rbnf_tmp_3 = lcl_8
lcl_8 = (rbnf_tmp_3 is None)
if lcl_8:
lcl_9 = builtin_tokens.offset
lcl_9 = (lcl_9, 'quote ) not match')
lcl_9 = builtin_cons(lcl_9, builtin_nil)
lcl_9 = (False, lcl_9)
lcl_8 = lcl_9
else:
lcl_9 = Tuple(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_9
lcl_9 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_8 = lcl_9
lcl_6 = lcl_8
elif (lcl_7 == 6):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_8 = _rbnf_cur_token
rbnf_tmp_2 = lcl_8
lcl_8 = maybeTuple(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_8
lcl_8 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_6 = lcl_8
else:
lcl_8 = (rbnf_named__off_2, 'lang_atom lookahead failed')
lcl_8 = builtin_cons(lcl_8, builtin_nil)
lcl_8 = (False, lcl_8)
lcl_6 = lcl_8
lcl_11 = lcl_6
else:
lcl_6 = (rbnf_named__off_2, 'lang_atom got EOF')
lcl_6 = builtin_cons(lcl_6, builtin_nil)
lcl_6 = (False, lcl_6)
lcl_11 = lcl_6
lcl_10 = lcl_11
lcl_4 = lcl_10
elif (lcl_5 == 21):
lcl_10 = rbnf_named_parse_lang_lst(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_11 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_11
lcl_11 = builtin_tokens.offset
rbnf_named__off_2 = lcl_11
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_11 = _rbnf_peek_tmp
if lcl_11:
lcl_7 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_7 = lcl_7.idint
if (lcl_7 == 0):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_8 = _rbnf_cur_token
rbnf_tmp_2 = lcl_8
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 6):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_8 = _rbnf_cur_token
rbnf_tmp_3 = lcl_8
lcl_8 = (rbnf_tmp_3 is None)
if lcl_8:
lcl_9 = builtin_tokens.offset
lcl_9 = (lcl_9, 'quote ) not match')
lcl_9 = builtin_cons(lcl_9, builtin_nil)
lcl_9 = (False, lcl_9)
lcl_8 = lcl_9
else:
lcl_9 = Tuple(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_9
lcl_9 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_8 = lcl_9
lcl_6 = lcl_8
elif (lcl_7 == 6):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_8 = _rbnf_cur_token
rbnf_tmp_2 = lcl_8
lcl_8 = maybeTuple(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_8
lcl_8 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_6 = lcl_8
else:
lcl_8 = (rbnf_named__off_2, 'lang_atom lookahead failed')
lcl_8 = builtin_cons(lcl_8, builtin_nil)
lcl_8 = (False, lcl_8)
lcl_6 = lcl_8
lcl_11 = lcl_6
else:
lcl_6 = (rbnf_named__off_2, 'lang_atom got EOF')
lcl_6 = builtin_cons(lcl_6, builtin_nil)
lcl_6 = (False, lcl_6)
lcl_11 = lcl_6
lcl_10 = lcl_11
lcl_4 = lcl_10
elif (lcl_5 == 1):
lcl_10 = rbnf_named_parse_lang_lst(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_11 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_11
lcl_11 = builtin_tokens.offset
rbnf_named__off_2 = lcl_11
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_11 = _rbnf_peek_tmp
if lcl_11:
lcl_7 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_7 = lcl_7.idint
if (lcl_7 == 0):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_8 = _rbnf_cur_token
rbnf_tmp_2 = lcl_8
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 6):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_8 = _rbnf_cur_token
rbnf_tmp_3 = lcl_8
lcl_8 = (rbnf_tmp_3 is None)
if lcl_8:
lcl_9 = builtin_tokens.offset
lcl_9 = (lcl_9, 'quote ) not match')
lcl_9 = builtin_cons(lcl_9, builtin_nil)
lcl_9 = (False, lcl_9)
lcl_8 = lcl_9
else:
lcl_9 = Tuple(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_9
lcl_9 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_8 = lcl_9
lcl_6 = lcl_8
elif (lcl_7 == 6):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_8 = _rbnf_cur_token
rbnf_tmp_2 = lcl_8
lcl_8 = maybeTuple(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_8
lcl_8 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_6 = lcl_8
else:
lcl_8 = (rbnf_named__off_2, 'lang_atom lookahead failed')
lcl_8 = builtin_cons(lcl_8, builtin_nil)
lcl_8 = (False, lcl_8)
lcl_6 = lcl_8
lcl_11 = lcl_6
else:
lcl_6 = (rbnf_named__off_2, 'lang_atom got EOF')
lcl_6 = builtin_cons(lcl_6, builtin_nil)
lcl_6 = (False, lcl_6)
lcl_11 = lcl_6
lcl_10 = lcl_11
lcl_4 = lcl_10
else:
lcl_10 = (rbnf_named__off_1, 'lang_atom lookahead failed')
lcl_10 = builtin_cons(lcl_10, builtin_nil)
lcl_10 = (False, lcl_10)
lcl_4 = lcl_10
lcl_3 = lcl_4
else:
lcl_10 = (rbnf_named__off_1, 'lang_atom got EOF')
lcl_10 = builtin_cons(lcl_10, builtin_nil)
lcl_10 = (False, lcl_10)
lcl_3 = lcl_10
lcl_1 = lcl_3
elif (lcl_2 == 22):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_10 = _rbnf_cur_token
rbnf_tmp_0 = lcl_10
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 21):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_10 = _rbnf_cur_token
rbnf_tmp_1 = lcl_10
lcl_10 = (rbnf_tmp_1 is None)
if lcl_10:
lcl_11 = builtin_tokens.offset
lcl_11 = (lcl_11, 'Int not match')
lcl_11 = builtin_cons(lcl_11, builtin_nil)
lcl_11 = (False, lcl_11)
lcl_10 = lcl_11
else:
lcl_11 = rbnf_tmp_1.value
lcl_11 = int(lcl_11)
lcl_11 = Slot(lcl_11)
rbnf_tmp_1_ = lcl_11
lcl_11 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_10 = lcl_11
lcl_1 = lcl_10
elif (lcl_2 == 21):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_10 = _rbnf_cur_token
rbnf_tmp_0 = lcl_10
lcl_10 = rbnf_tmp_0.value
lcl_10 = int(lcl_10)
lcl_10 = Int(lcl_10)
rbnf_tmp_1_ = lcl_10
lcl_10 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_1 = lcl_10
elif (lcl_2 == 1):
lcl_10 = rbnf_named_parse_Ident(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_10
lcl_10 = rbnf_named__check_0[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_0
else:
lcl_11 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_11
lcl_11 = Var(rbnf_tmp_0)
rbnf_tmp_1_ = lcl_11
lcl_11 = rbnf_named_lr_loop_lang_atom(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_10 = lcl_11
lcl_1 = lcl_10
else:
lcl_10 = (rbnf_named__off_0, 'lang_atom lookahead failed')
lcl_10 = builtin_cons(lcl_10, builtin_nil)
lcl_10 = (False, lcl_10)
lcl_1 = lcl_10
lcl_0 = lcl_1
else:
lcl_1 = (rbnf_named__off_0, 'lang_atom got EOF')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
return lcl_0
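# lang_lst: a ','-separated list of lang expressions, via rbnfmacro_6.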
def rbnf_named_parse_lang_lst(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_rbnfmacro_6(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
rbnf_tmp_1_ = rbnf_tmp_0
lcl_1 = (True, rbnf_tmp_1_)
lcl_0 = lcl_1
return lcl_0
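# lang_stmts: ';'-separated lang statements (rbnfmacro_5) wrapped by
# maybeStmts(...).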
def rbnf_named_parse_lang_stmts(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_rbnfmacro_5(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
lcl_1 = maybeStmts(rbnf_tmp_0)
rbnf_tmp_1_ = lcl_1
lcl_1 = (True, rbnf_tmp_1_)
lcl_0 = lcl_1
return lcl_0
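# pragma: token id 25 introduces Params over a ','-separated Ident list;
# id 24 introduces an Include over a filename list, optionally preceded by an
# Ident tag; an Ident followed by a Code token (id 26), or a bare Code token,
# also yields an Include carrying the inline code.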
def rbnf_named_parse_pragma(builtin_state, builtin_tokens):
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_0 = _rbnf_peek_tmp
if lcl_0:
lcl_2 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_2 = lcl_2.idint
if (lcl_2 == 25):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_3 = _rbnf_cur_token
rbnf_tmp_0 = lcl_3
lcl_3 = rbnf_named_parse_rbnfmacro_4(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_3
lcl_3 = rbnf_named__check_1[0]
lcl_3 = (lcl_3 == False)
if lcl_3:
lcl_3 = rbnf_named__check_1
else:
lcl_4 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_4
lcl_4 = Params(rbnf_tmp_1)
rbnf_tmp_1_ = lcl_4
lcl_4 = (True, rbnf_tmp_1_)
lcl_3 = lcl_4
lcl_1 = lcl_3
elif (lcl_2 == 24):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_3 = _rbnf_cur_token
rbnf_tmp_0 = lcl_3
lcl_3 = builtin_tokens.offset
rbnf_named__off_1 = lcl_3
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_3 = _rbnf_peek_tmp
if lcl_3:
lcl_5 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_5 = lcl_5.idint
if (lcl_5 == 2):
lcl_6 = rbnf_named_parse_rbnfmacro_7(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_6
lcl_6 = rbnf_named__check_1[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_1
else:
lcl_7 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_7
lcl_7 = Include(None, rbnf_tmp_1)
rbnf_tmp_1_ = lcl_7
lcl_7 = (True, rbnf_tmp_1_)
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 1):
lcl_6 = rbnf_named_parse_Ident(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_6
lcl_6 = rbnf_named__check_1[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_1
else:
lcl_7 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_7
lcl_7 = rbnf_named_parse_rbnfmacro_7(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_7
lcl_7 = rbnf_named__check_2[0]
lcl_7 = (lcl_7 == False)
if lcl_7:
lcl_7 = rbnf_named__check_2
else:
lcl_8 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_8
lcl_8 = Include(rbnf_tmp_1, rbnf_tmp_2)
rbnf_tmp_1_ = lcl_8
lcl_8 = (True, rbnf_tmp_1_)
lcl_7 = lcl_8
lcl_6 = lcl_7
lcl_4 = lcl_6
else:
lcl_6 = (rbnf_named__off_1, 'pragma lookahead failed')
lcl_6 = builtin_cons(lcl_6, builtin_nil)
lcl_6 = (False, lcl_6)
lcl_4 = lcl_6
lcl_3 = lcl_4
else:
lcl_4 = (rbnf_named__off_1, 'pragma got EOF')
lcl_4 = builtin_cons(lcl_4, builtin_nil)
lcl_4 = (False, lcl_4)
lcl_3 = lcl_4
lcl_1 = lcl_3
elif (lcl_2 == 1):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_3 = _rbnf_cur_token
rbnf_tmp_0 = lcl_3
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint is 26):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_3 = _rbnf_cur_token
rbnf_tmp_1 = lcl_3
lcl_3 = (rbnf_tmp_1 is None)
if lcl_3:
lcl_4 = builtin_tokens.offset
lcl_4 = (lcl_4, 'Code not match')
lcl_4 = builtin_cons(lcl_4, builtin_nil)
lcl_4 = (False, lcl_4)
lcl_3 = lcl_4
else:
lcl_4 = rbnf_tmp_0.value
lcl_5 = []
lcl_6 = rbnf_tmp_1.value
lcl_6 = (lcl_6, None)
_rbnf_immediate_lst = lcl_5
_rbnf_immediate_lst.append(lcl_6)
lcl_5 = _rbnf_immediate_lst
lcl_4 = Include(lcl_4, lcl_5)
rbnf_tmp_1_ = lcl_4
lcl_4 = (True, rbnf_tmp_1_)
lcl_3 = lcl_4
lcl_1 = lcl_3
elif (lcl_2 == 26):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_3 = _rbnf_cur_token
rbnf_tmp_0 = lcl_3
lcl_3 = []
lcl_4 = rbnf_tmp_0.value
lcl_4 = (lcl_4, None)
_rbnf_immediate_lst = lcl_3
_rbnf_immediate_lst.append(lcl_4)
lcl_3 = _rbnf_immediate_lst
lcl_3 = Include(None, lcl_3)
rbnf_tmp_1_ = lcl_3
lcl_3 = (True, rbnf_tmp_1_)
lcl_1 = lcl_3
else:
lcl_3 = (rbnf_named__off_0, 'pragma lookahead failed')
lcl_3 = builtin_cons(lcl_3, builtin_nil)
lcl_3 = (False, lcl_3)
lcl_1 = lcl_3
lcl_0 = lcl_1
else:
lcl_1 = (rbnf_named__off_0, 'pragma got EOF')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
return lcl_0
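# prod: an Ident followed either by '[' params ']' def ';' (a MacroDef, with
# an empty '[]' parameter list handled as its own branch) or by a defining
# token (ids 16/17/18/19) and body (a plain Def). The trailing ';' is token
# id 20 in every branch.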
def rbnf_named_parse_prod(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_Ident(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
lcl_1 = builtin_tokens.offset
rbnf_named__off_0 = lcl_1
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_1 = _rbnf_peek_tmp
if lcl_1:
lcl_3 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_3 = lcl_3.idint
if (lcl_3 == 7):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_4 = _rbnf_cur_token
rbnf_tmp_1 = lcl_4
lcl_4 = builtin_tokens.offset
rbnf_named__off_1 = lcl_4
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_4 = _rbnf_peek_tmp
if lcl_4:
lcl_6 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_6 = lcl_6.idint
if (lcl_6 == 8):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_7 = _rbnf_cur_token
rbnf_tmp_2 = lcl_7
lcl_7 = rbnf_named_parse_def(builtin_state, builtin_tokens)
rbnf_named__check_3 = lcl_7
lcl_7 = rbnf_named__check_3[0]
lcl_7 = (lcl_7 == False)
if lcl_7:
lcl_7 = rbnf_named__check_3
else:
lcl_8 = rbnf_named__check_3[1]
rbnf_tmp_3 = lcl_8
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 20):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_8 = _rbnf_cur_token
rbnf_tmp_4 = lcl_8
lcl_8 = (rbnf_tmp_4 is None)
if lcl_8:
lcl_9 = builtin_tokens.offset
lcl_9 = (lcl_9, 'quote ; not match')
lcl_9 = builtin_cons(lcl_9, builtin_nil)
lcl_9 = (False, lcl_9)
lcl_8 = lcl_9
else:
lcl_9 = []
lcl_9 = MacroDef(rbnf_tmp_0, lcl_9, rbnf_tmp_3)
rbnf_tmp_1_ = lcl_9
lcl_9 = (True, rbnf_tmp_1_)
lcl_8 = lcl_9
lcl_7 = lcl_8
lcl_5 = lcl_7
elif (lcl_6 == 1):
lcl_7 = rbnf_named_parse_IdentList(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_7
lcl_7 = rbnf_named__check_2[0]
lcl_7 = (lcl_7 == False)
if lcl_7:
lcl_7 = rbnf_named__check_2
else:
lcl_8 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_8
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 8):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_8 = _rbnf_cur_token
rbnf_tmp_3 = lcl_8
lcl_8 = (rbnf_tmp_3 is None)
if lcl_8:
lcl_9 = builtin_tokens.offset
lcl_9 = (lcl_9, 'quote ] not match')
lcl_9 = builtin_cons(lcl_9, builtin_nil)
lcl_9 = (False, lcl_9)
lcl_8 = lcl_9
else:
lcl_9 = rbnf_named_parse_def(builtin_state, builtin_tokens)
rbnf_named__check_4 = lcl_9
lcl_9 = rbnf_named__check_4[0]
lcl_9 = (lcl_9 == False)
if lcl_9:
lcl_9 = rbnf_named__check_4
else:
lcl_10 = rbnf_named__check_4[1]
rbnf_tmp_4 = lcl_10
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 20):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_10 = _rbnf_cur_token
rbnf_tmp_5 = lcl_10
lcl_10 = (rbnf_tmp_5 is None)
if lcl_10:
lcl_11 = builtin_tokens.offset
lcl_11 = (lcl_11, 'quote ; not match')
lcl_11 = builtin_cons(lcl_11, builtin_nil)
lcl_11 = (False, lcl_11)
lcl_10 = lcl_11
else:
lcl_11 = MacroDef(rbnf_tmp_0, rbnf_tmp_2, rbnf_tmp_4)
rbnf_tmp_1_ = lcl_11
lcl_11 = (True, rbnf_tmp_1_)
lcl_10 = lcl_11
lcl_9 = lcl_10
lcl_8 = lcl_9
lcl_7 = lcl_8
lcl_5 = lcl_7
else:
lcl_10 = (rbnf_named__off_1, 'prod lookahead failed')
lcl_10 = builtin_cons(lcl_10, builtin_nil)
lcl_10 = (False, lcl_10)
lcl_5 = lcl_10
lcl_4 = lcl_5
else:
lcl_10 = (rbnf_named__off_1, 'prod got EOF')
lcl_10 = builtin_cons(lcl_10, builtin_nil)
lcl_10 = (False, lcl_10)
lcl_4 = lcl_10
lcl_2 = lcl_4
elif (lcl_3 == 19):
lcl_10 = rbnf_named_parse_def(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_11 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_11
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 20):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_11 = _rbnf_cur_token
rbnf_tmp_2 = lcl_11
lcl_11 = (rbnf_tmp_2 is None)
if lcl_11:
lcl_4 = builtin_tokens.offset
lcl_4 = (lcl_4, 'quote ; not match')
lcl_4 = builtin_cons(lcl_4, builtin_nil)
lcl_4 = (False, lcl_4)
lcl_11 = lcl_4
else:
lcl_4 = Def(rbnf_tmp_0, rbnf_tmp_1)
rbnf_tmp_1_ = lcl_4
lcl_4 = (True, rbnf_tmp_1_)
lcl_11 = lcl_4
lcl_10 = lcl_11
lcl_2 = lcl_10
elif (lcl_3 == 17):
lcl_10 = rbnf_named_parse_def(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_11 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_11
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 20):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_11 = _rbnf_cur_token
rbnf_tmp_2 = lcl_11
lcl_11 = (rbnf_tmp_2 is None)
if lcl_11:
lcl_4 = builtin_tokens.offset
lcl_4 = (lcl_4, 'quote ; not match')
lcl_4 = builtin_cons(lcl_4, builtin_nil)
lcl_4 = (False, lcl_4)
lcl_11 = lcl_4
else:
lcl_4 = Def(rbnf_tmp_0, rbnf_tmp_1)
rbnf_tmp_1_ = lcl_4
lcl_4 = (True, rbnf_tmp_1_)
lcl_11 = lcl_4
lcl_10 = lcl_11
lcl_2 = lcl_10
elif (lcl_3 == 18):
lcl_10 = rbnf_named_parse_def(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_11 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_11
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 20):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_11 = _rbnf_cur_token
rbnf_tmp_2 = lcl_11
lcl_11 = (rbnf_tmp_2 is None)
if lcl_11:
lcl_4 = builtin_tokens.offset
lcl_4 = (lcl_4, 'quote ; not match')
lcl_4 = builtin_cons(lcl_4, builtin_nil)
lcl_4 = (False, lcl_4)
lcl_11 = lcl_4
else:
lcl_4 = Def(rbnf_tmp_0, rbnf_tmp_1)
rbnf_tmp_1_ = lcl_4
lcl_4 = (True, rbnf_tmp_1_)
lcl_11 = lcl_4
lcl_10 = lcl_11
lcl_2 = lcl_10
elif (lcl_3 == 16):
lcl_10 = rbnf_named_parse_def(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_11 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_11
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 20):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_11 = _rbnf_cur_token
rbnf_tmp_2 = lcl_11
lcl_11 = (rbnf_tmp_2 is None)
if lcl_11:
lcl_4 = builtin_tokens.offset
lcl_4 = (lcl_4, 'quote ; not match')
lcl_4 = builtin_cons(lcl_4, builtin_nil)
lcl_4 = (False, lcl_4)
lcl_11 = lcl_4
else:
lcl_4 = Def(rbnf_tmp_0, rbnf_tmp_1)
rbnf_tmp_1_ = lcl_4
lcl_4 = (True, rbnf_tmp_1_)
lcl_11 = lcl_4
lcl_10 = lcl_11
lcl_2 = lcl_10
else:
lcl_10 = (rbnf_named__off_0, 'prod lookahead failed')
lcl_10 = builtin_cons(lcl_10, builtin_nil)
lcl_10 = (False, lcl_10)
lcl_2 = lcl_10
lcl_1 = lcl_2
else:
lcl_10 = (rbnf_named__off_0, 'prod got EOF')
lcl_10 = builtin_cons(lcl_10, builtin_nil)
lcl_10 = (False, lcl_10)
lcl_1 = lcl_10
lcl_0 = lcl_1
return lcl_0
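# Each rbnf_named_parse_rbnfmacro_N helper below follows one pattern: parse
# a single leading element (expr, atomExpr, cseq, rewrite, Ident, lang, or
# filename), seed a Python list with it, then delegate to the matching
# rbnf_named_lr_loop_rbnfmacro_N to consume the remaining list elements.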
def rbnf_named_parse_rbnfmacro_0(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_expr(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
lcl_1 = []
_rbnf_immediate_lst = lcl_1
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_1 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_1
lcl_1 = rbnf_named_lr_loop_rbnfmacro_0(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_0 = lcl_1
return lcl_0
def rbnf_named_parse_rbnfmacro_1(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_atomExpr(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
lcl_1 = []
_rbnf_immediate_lst = lcl_1
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_1 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_1
lcl_1 = rbnf_named_lr_loop_rbnfmacro_1(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_0 = lcl_1
return lcl_0
def rbnf_named_parse_rbnfmacro_2(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_cseq(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
lcl_1 = []
_rbnf_immediate_lst = lcl_1
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_1 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_1
lcl_1 = rbnf_named_lr_loop_rbnfmacro_2(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_0 = lcl_1
return lcl_0
def rbnf_named_parse_rbnfmacro_3(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_rewrite(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
lcl_1 = []
_rbnf_immediate_lst = lcl_1
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_1 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_1
lcl_1 = rbnf_named_lr_loop_rbnfmacro_3(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_0 = lcl_1
return lcl_0
def rbnf_named_parse_rbnfmacro_4(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_Ident(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
lcl_1 = []
_rbnf_immediate_lst = lcl_1
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_1 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_1
lcl_1 = rbnf_named_lr_loop_rbnfmacro_4(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_0 = lcl_1
return lcl_0
def rbnf_named_parse_rbnfmacro_5(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_lang(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
lcl_1 = []
_rbnf_immediate_lst = lcl_1
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_1 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_1
lcl_1 = rbnf_named_lr_loop_rbnfmacro_5(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_0 = lcl_1
return lcl_0
def rbnf_named_parse_rbnfmacro_6(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_lang(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
lcl_1 = []
_rbnf_immediate_lst = lcl_1
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_1 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_1
lcl_1 = rbnf_named_lr_loop_rbnfmacro_6(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_0 = lcl_1
return lcl_0
def rbnf_named_parse_rbnfmacro_7(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_filename(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
lcl_1 = []
_rbnf_immediate_lst = lcl_1
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_1 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_1
lcl_1 = rbnf_named_lr_loop_rbnfmacro_7(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_0 = lcl_1
return lcl_0
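# rbnfmacro_8 seeds the same lr loop, but first dispatches on one or two
# tokens of lookahead: idint 24/25/26 selects parse_pragma, while idint 1
# peeks a second token to choose between parse_prod and parse_pragma.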
def rbnf_named_parse_rbnfmacro_8(builtin_state, builtin_tokens):
lcl_0 = builtin_tokens.offset
rbnf_named__off_0 = lcl_0
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_0 = _rbnf_peek_tmp
if lcl_0:
lcl_2 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_2 = lcl_2.idint
if (lcl_2 == 25):
lcl_3 = rbnf_named_parse_pragma(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_3
lcl_3 = rbnf_named__check_0[0]
lcl_3 = (lcl_3 == False)
if lcl_3:
lcl_3 = rbnf_named__check_0
else:
lcl_4 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_4
lcl_4 = []
_rbnf_immediate_lst = lcl_4
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_4 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_4
lcl_4 = rbnf_named_lr_loop_rbnfmacro_8(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_3 = lcl_4
lcl_1 = lcl_3
elif (lcl_2 == 24):
lcl_3 = rbnf_named_parse_pragma(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_3
lcl_3 = rbnf_named__check_0[0]
lcl_3 = (lcl_3 == False)
if lcl_3:
lcl_3 = rbnf_named__check_0
else:
lcl_4 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_4
lcl_4 = []
_rbnf_immediate_lst = lcl_4
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_4 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_4
lcl_4 = rbnf_named_lr_loop_rbnfmacro_8(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_3 = lcl_4
lcl_1 = lcl_3
elif (lcl_2 == 1):
lcl_3 = builtin_tokens.offset
rbnf_named__off_1 = lcl_3
try:
builtin_tokens.array[(builtin_tokens.offset + 1)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_3 = _rbnf_peek_tmp
if lcl_3:
lcl_5 = builtin_tokens.array[(builtin_tokens.offset + 1)]
lcl_5 = lcl_5.idint
if (lcl_5 == 7):
lcl_6 = rbnf_named_parse_prod(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_6
lcl_6 = rbnf_named__check_0[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_0
else:
lcl_7 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_7
lcl_7 = []
_rbnf_immediate_lst = lcl_7
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_7 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_7
lcl_7 = rbnf_named_lr_loop_rbnfmacro_8(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 19):
lcl_6 = rbnf_named_parse_prod(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_6
lcl_6 = rbnf_named__check_0[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_0
else:
lcl_7 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_7
lcl_7 = []
_rbnf_immediate_lst = lcl_7
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_7 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_7
lcl_7 = rbnf_named_lr_loop_rbnfmacro_8(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 17):
lcl_6 = rbnf_named_parse_prod(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_6
lcl_6 = rbnf_named__check_0[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_0
else:
lcl_7 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_7
lcl_7 = []
_rbnf_immediate_lst = lcl_7
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_7 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_7
lcl_7 = rbnf_named_lr_loop_rbnfmacro_8(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 18):
lcl_6 = rbnf_named_parse_prod(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_6
lcl_6 = rbnf_named__check_0[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_0
else:
lcl_7 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_7
lcl_7 = []
_rbnf_immediate_lst = lcl_7
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_7 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_7
lcl_7 = rbnf_named_lr_loop_rbnfmacro_8(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 16):
lcl_6 = rbnf_named_parse_prod(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_6
lcl_6 = rbnf_named__check_0[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_0
else:
lcl_7 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_7
lcl_7 = []
_rbnf_immediate_lst = lcl_7
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_7 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_7
lcl_7 = rbnf_named_lr_loop_rbnfmacro_8(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 26):
lcl_6 = rbnf_named_parse_pragma(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_6
lcl_6 = rbnf_named__check_0[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_0
else:
lcl_7 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_7
lcl_7 = []
_rbnf_immediate_lst = lcl_7
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_7 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_7
lcl_7 = rbnf_named_lr_loop_rbnfmacro_8(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_6 = lcl_7
lcl_4 = lcl_6
else:
lcl_6 = rbnf_named_parse_prod(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_6
lcl_6 = rbnf_named__check_0[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_0
else:
lcl_7 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_7
lcl_7 = []
_rbnf_immediate_lst = lcl_7
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_7 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_7
lcl_7 = rbnf_named_lr_loop_rbnfmacro_8(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_6 = lcl_7
lcl_4 = lcl_6
lcl_3 = lcl_4
else:
lcl_4 = (rbnf_named__off_1, 'rbnfmacro_8 got EOF')
lcl_4 = builtin_cons(lcl_4, builtin_nil)
lcl_4 = (False, lcl_4)
lcl_3 = lcl_4
lcl_1 = lcl_3
elif (lcl_2 == 26):
lcl_3 = rbnf_named_parse_pragma(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_3
lcl_3 = rbnf_named__check_0[0]
lcl_3 = (lcl_3 == False)
if lcl_3:
lcl_3 = rbnf_named__check_0
else:
lcl_4 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_4
lcl_4 = []
_rbnf_immediate_lst = lcl_4
_rbnf_immediate_lst.append(rbnf_tmp_0)
lcl_4 = _rbnf_immediate_lst
rbnf_tmp_1_ = lcl_4
lcl_4 = rbnf_named_lr_loop_rbnfmacro_8(rbnf_tmp_1_, builtin_state, builtin_tokens)
lcl_3 = lcl_4
lcl_1 = lcl_3
else:
lcl_3 = (rbnf_named__off_0, 'rbnfmacro_8 lookahead failed')
lcl_3 = builtin_cons(lcl_3, builtin_nil)
lcl_3 = (False, lcl_3)
lcl_1 = lcl_3
lcl_0 = lcl_1
else:
lcl_1 = (rbnf_named__off_0, 'rbnfmacro_8 got EOF')
lcl_1 = builtin_cons(lcl_1, builtin_nil)
lcl_1 = (False, lcl_1)
lcl_0 = lcl_1
return lcl_0
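# rewrite: parse a seq, then an optional '{' lang_stmts '}' block (token
# idints 14 and 15 appear to be '{' and '}', judging by the 'quote } not
# match' message). Without the block the result is Rewrite(seq(...), None);
# EOF immediately after the seq is reported as 'rewrite got EOF'.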
def rbnf_named_parse_rewrite(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_seq(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
lcl_1 = builtin_tokens.offset
rbnf_named__off_0 = lcl_1
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_1 = _rbnf_peek_tmp
if lcl_1:
lcl_3 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_3 = lcl_3.idint
if (lcl_3 == 14):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_4 = _rbnf_cur_token
rbnf_tmp_1 = lcl_4
lcl_4 = rbnf_named_parse_lang_stmts(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_4
lcl_4 = rbnf_named__check_2[0]
lcl_4 = (lcl_4 == False)
if lcl_4:
lcl_4 = rbnf_named__check_2
else:
lcl_5 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_5
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint == 15):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_5 = _rbnf_cur_token
rbnf_tmp_3 = lcl_5
lcl_5 = (rbnf_tmp_3 is None)
if lcl_5:
lcl_6 = builtin_tokens.offset
lcl_6 = (lcl_6, 'quote } not match')
lcl_6 = builtin_cons(lcl_6, builtin_nil)
lcl_6 = (False, lcl_6)
lcl_5 = lcl_6
else:
lcl_6 = seq(rbnf_tmp_0)
lcl_6 = Rewrite(lcl_6, rbnf_tmp_2)
rbnf_tmp_1_ = lcl_6
lcl_6 = (True, rbnf_tmp_1_)
lcl_5 = lcl_6
lcl_4 = lcl_5
lcl_2 = lcl_4
else:
lcl_4 = seq(rbnf_tmp_0)
lcl_4 = Rewrite(lcl_4, None)
rbnf_tmp_1_ = lcl_4
lcl_4 = (True, rbnf_tmp_1_)
lcl_2 = lcl_4
lcl_1 = lcl_2
else:
lcl_2 = (rbnf_named__off_0, 'rewrite got EOF')
lcl_2 = builtin_cons(lcl_2, builtin_nil)
lcl_2 = (False, lcl_2)
lcl_1 = lcl_2
lcl_0 = lcl_1
return lcl_0
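# seq is a thin wrapper: it parses an atomExpr list via rbnfmacro_1 and
# returns it unchanged as a (True, value) result.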
def rbnf_named_parse_seq(builtin_state, builtin_tokens):
lcl_0 = rbnf_named_parse_rbnfmacro_1(builtin_state, builtin_tokens)
rbnf_named__check_0 = lcl_0
lcl_0 = rbnf_named__check_0[0]
lcl_0 = (lcl_0 == False)
if lcl_0:
lcl_0 = rbnf_named__check_0
else:
lcl_1 = rbnf_named__check_0[1]
rbnf_tmp_0 = lcl_1
rbnf_tmp_1_ = rbnf_tmp_0
lcl_1 = (True, rbnf_tmp_1_)
lcl_0 = lcl_1
return lcl_0
return rbnf_named_parse_START
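# Usage sketch (illustration only, not generated output): every
# rbnf_named_parse_* function above returns (True, node) on success or
# (False, errors) on failure, where `errors` is a cons list of
# (offset, message) pairs built with builtin_cons/builtin_nil. The helper
# below is hypothetical and assumes each cons cell is a (head, tail) tuple
# with a falsy builtin_nil terminator.
def unwrap_parse_result(result):
    ok, payload = result
    if ok:
        return payload  # the parsed AST node
    messages = []
    while payload:  # walk the cons list of (offset, message) pairs
        (offset, message), payload = payload
        messages.append('at token offset %d: %s' % (offset, message))
    raise SyntaxError('; '.join(messages))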
0bdb9181dcdd3d76e5702620eda6f3eb716b6ec9 | 133 | py | Python | datacatalog/identifiers/typeduuid/examples.py | SD2E/python-datacatalog | 51ab366639505fb6e8a14cd6b446de37080cd20d | ["CNRI-Python"] | null | null | null | datacatalog/identifiers/typeduuid/examples.py | SD2E/python-datacatalog | 51ab366639505fb6e8a14cd6b446de37080cd20d | ["CNRI-Python"] | 2 | 2019-07-25T15:39:04.000Z | 2019-10-21T15:31:46.000Z | datacatalog/identifiers/typeduuid/examples.py | SD2E/python-datacatalog | 51ab366639505fb6e8a14cd6b446de37080cd20d | ["CNRI-Python"] | 1 | 2019-10-15T14:33:44.000Z | 2019-10-15T14:33:44.000Z
TYPEDUUID = ['1010f74e-cd69-51de-b5a1-1570c88c01f1', '114d6d85-6dab-54af-8fee-5d7871df2a55', '102edd93-29d6-5483-b60b-8dfd4d094b9c']
040ee0c223bedf396922f127ac400909a4e0bed7 | 87,385 | py | Python | nova/tests/unit/virt/libvirt/test_imagecache.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | ["Apache-2.0"] | null | null | null | nova/tests/unit/virt/libvirt/test_imagecache.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | ["Apache-2.0"] | null | null | null | nova/tests/unit/virt/libvirt/test_imagecache.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | ["Apache-2.0"] | 2 | 2017-07-20T17:31:34.000Z | 2020-07-24T02:42:19.000Z
begin_unit
comment|'# Copyright 2012 Michael Still and Canonical Inc'
nl|'\n'
comment|'# All Rights Reserved.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
nl|'\n'
name|'import'
name|'contextlib'
newline|'\n'
name|'import'
name|'hashlib'
newline|'\n'
name|'import'
name|'os'
newline|'\n'
name|'import'
name|'time'
newline|'\n'
nl|'\n'
name|'import'
name|'mock'
newline|'\n'
name|'from'
name|'oslo_concurrency'
name|'import'
name|'lockutils'
newline|'\n'
name|'from'
name|'oslo_concurrency'
name|'import'
name|'processutils'
newline|'\n'
name|'from'
name|'oslo_log'
name|'import'
name|'formatters'
newline|'\n'
name|'from'
name|'oslo_log'
name|'import'
name|'log'
name|'as'
name|'logging'
newline|'\n'
name|'from'
name|'oslo_serialization'
name|'import'
name|'jsonutils'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'importutils'
newline|'\n'
name|'from'
name|'six'
op|'.'
name|'moves'
name|'import'
name|'cStringIO'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
name|'import'
name|'conductor'
newline|'\n'
name|'import'
name|'nova'
op|'.'
name|'conf'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'context'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'objects'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'fake_instance'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'utils'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'libvirt'
name|'import'
name|'imagecache'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'libvirt'
name|'import'
name|'utils'
name|'as'
name|'libvirt_utils'
newline|'\n'
nl|'\n'
DECL|variable|CONF
name|'CONF'
op|'='
name|'nova'
op|'.'
name|'conf'
op|'.'
name|'CONF'
newline|'\n'
nl|'\n'
nl|'\n'
op|'@'
name|'contextlib'
op|'.'
name|'contextmanager'
newline|'\n'
DECL|function|intercept_log_messages
name|'def'
name|'intercept_log_messages'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
indent|' '
name|'mylog'
op|'='
name|'logging'
op|'.'
name|'getLogger'
op|'('
string|"'nova'"
op|')'
newline|'\n'
name|'stream'
op|'='
name|'cStringIO'
op|'('
op|')'
newline|'\n'
name|'handler'
op|'='
name|'logging'
op|'.'
name|'logging'
op|'.'
name|'StreamHandler'
op|'('
name|'stream'
op|')'
newline|'\n'
name|'handler'
op|'.'
name|'setFormatter'
op|'('
name|'formatters'
op|'.'
name|'ContextFormatter'
op|'('
op|')'
op|')'
newline|'\n'
name|'mylog'
op|'.'
name|'logger'
op|'.'
name|'addHandler'
op|'('
name|'handler'
op|')'
newline|'\n'
name|'yield'
name|'stream'
newline|'\n'
dedent|''
name|'finally'
op|':'
newline|'\n'
indent|' '
name|'mylog'
op|'.'
name|'logger'
op|'.'
name|'removeHandler'
op|'('
name|'handler'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ImageCacheManagerTestCase
dedent|''
dedent|''
name|'class'
name|'ImageCacheManagerTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|member|setUp
indent|' '
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'ImageCacheManagerTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stock_instance_names'
op|'='
name|'set'
op|'('
op|'['
string|"'instance-00000001'"
op|','
nl|'\n'
string|"'instance-00000002'"
op|','
nl|'\n'
string|"'instance-00000003'"
op|','
nl|'\n'
string|"'banana-42-hamster'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_read_stored_checksum_missing
dedent|''
name|'def'
name|'test_read_stored_checksum_missing'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.exists'"
op|','
name|'lambda'
name|'x'
op|':'
name|'False'
op|')'
newline|'\n'
name|'csum'
op|'='
name|'imagecache'
op|'.'
name|'read_stored_checksum'
op|'('
string|"'/tmp/foo'"
op|','
name|'timestamped'
op|'='
name|'False'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'csum'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'os'
op|'.'
name|'path'
op|','
string|"'exists'"
op|','
name|'return_value'
op|'='
name|'True'
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'time'
op|','
string|"'time'"
op|','
name|'return_value'
op|'='
number|'2000000'
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'os'
op|'.'
name|'path'
op|','
string|"'getmtime'"
op|','
name|'return_value'
op|'='
number|'1000000'
op|')'
newline|'\n'
DECL|member|test_get_age_of_file
name|'def'
name|'test_get_age_of_file'
op|'('
name|'self'
op|','
name|'mock_getmtime'
op|','
name|'mock_time'
op|','
name|'mock_exists'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'exists'
op|','
name|'age'
op|'='
name|'image_cache_manager'
op|'.'
name|'_get_age_of_file'
op|'('
string|"'/tmp'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'exists'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1000000'
op|','
name|'age'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'os'
op|'.'
name|'path'
op|','
string|"'exists'"
op|','
name|'return_value'
op|'='
name|'False'
op|')'
newline|'\n'
DECL|member|test_get_age_of_file_not_exists
name|'def'
name|'test_get_age_of_file_not_exists'
op|'('
name|'self'
op|','
name|'mock_exists'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'exists'
op|','
name|'age'
op|'='
name|'image_cache_manager'
op|'.'
name|'_get_age_of_file'
op|'('
string|"'/tmp'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'exists'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'0'
op|','
name|'age'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_read_stored_checksum
dedent|''
name|'def'
name|'test_read_stored_checksum'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'utils'
op|'.'
name|'tempdir'
op|'('
op|')'
name|'as'
name|'tmpdir'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'instances_path'
op|'='
name|'tmpdir'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'image_info_filename_pattern'
op|'='
op|'('
string|"'$instances_path/'"
nl|'\n'
string|"'%(image)s.info'"
op|')'
op|','
nl|'\n'
name|'group'
op|'='
string|"'libvirt'"
op|')'
newline|'\n'
nl|'\n'
name|'csum_input'
op|'='
string|'\'{"sha1": "fdghkfhkgjjksfdgjksjkghsdf"}\\n\''
newline|'\n'
name|'fname'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'tmpdir'
op|','
string|"'aaa'"
op|')'
newline|'\n'
name|'info_fname'
op|'='
name|'imagecache'
op|'.'
name|'get_info_filename'
op|'('
name|'fname'
op|')'
newline|'\n'
name|'f'
op|'='
name|'open'
op|'('
name|'info_fname'
op|','
string|"'w'"
op|')'
newline|'\n'
name|'f'
op|'.'
name|'write'
op|'('
name|'csum_input'
op|')'
newline|'\n'
name|'f'
op|'.'
name|'close'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'csum_output'
op|'='
name|'imagecache'
op|'.'
name|'read_stored_checksum'
op|'('
name|'fname'
op|','
nl|'\n'
name|'timestamped'
op|'='
name|'False'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'csum_input'
op|'.'
name|'rstrip'
op|'('
op|')'
op|','
nl|'\n'
string|'\'{"sha1": "%s"}\''
op|'%'
name|'csum_output'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_read_stored_checksum_legacy_essex
dedent|''
dedent|''
name|'def'
name|'test_read_stored_checksum_legacy_essex'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'utils'
op|'.'
name|'tempdir'
op|'('
op|')'
name|'as'
name|'tmpdir'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'instances_path'
op|'='
name|'tmpdir'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'image_info_filename_pattern'
op|'='
op|'('
string|"'$instances_path/'"
nl|'\n'
string|"'%(image)s.info'"
op|')'
op|','
nl|'\n'
name|'group'
op|'='
string|"'libvirt'"
op|')'
newline|'\n'
nl|'\n'
name|'fname'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'tmpdir'
op|','
string|"'aaa'"
op|')'
newline|'\n'
name|'old_fname'
op|'='
name|'fname'
op|'+'
string|"'.sha1'"
newline|'\n'
name|'f'
op|'='
name|'open'
op|'('
name|'old_fname'
op|','
string|"'w'"
op|')'
newline|'\n'
name|'f'
op|'.'
name|'write'
op|'('
string|"'fdghkfhkgjjksfdgjksjkghsdf'"
op|')'
newline|'\n'
name|'f'
op|'.'
name|'close'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'csum_output'
op|'='
name|'imagecache'
op|'.'
name|'read_stored_checksum'
op|'('
name|'fname'
op|','
nl|'\n'
name|'timestamped'
op|'='
name|'False'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'csum_output'
op|','
string|"'fdghkfhkgjjksfdgjksjkghsdf'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'old_fname'
op|')'
op|')'
newline|'\n'
name|'info_fname'
op|'='
name|'imagecache'
op|'.'
name|'get_info_filename'
op|'('
name|'fname'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'info_fname'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_list_base_images
dedent|''
dedent|''
name|'def'
name|'test_list_base_images'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'listing'
op|'='
op|'['
string|"'00000001'"
op|','
nl|'\n'
string|"'ephemeral_0_20_None'"
op|','
nl|'\n'
string|"'17d1b00b81642842e514494a78e804e9a511637c_5368709120.info'"
op|','
nl|'\n'
string|"'00000004'"
op|','
nl|'\n'
string|"'swap_1000'"
op|']'
newline|'\n'
name|'images'
op|'='
op|'['
string|"'e97222e91fc4241f49a7f520d1dcf446751129b3_sm'"
op|','
nl|'\n'
string|"'e09c675c2d1cfac32dae3c2d83689c8c94bc693b_sm'"
op|','
nl|'\n'
string|"'e97222e91fc4241f49a7f520d1dcf446751129b3'"
op|','
nl|'\n'
string|"'17d1b00b81642842e514494a78e804e9a511637c'"
op|','
nl|'\n'
string|"'17d1b00b81642842e514494a78e804e9a511637c_5368709120'"
op|','
nl|'\n'
string|"'17d1b00b81642842e514494a78e804e9a511637c_10737418240'"
op|']'
newline|'\n'
name|'listing'
op|'.'
name|'extend'
op|'('
name|'images'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.listdir'"
op|','
name|'lambda'
name|'x'
op|':'
name|'listing'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.isfile'"
op|','
name|'lambda'
name|'x'
op|':'
name|'True'
op|')'
newline|'\n'
nl|'\n'
name|'base_dir'
op|'='
string|"'/var/lib/nova/instances/_base'"
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'instances_path'
op|'='
string|"'/var/lib/nova/instances'"
op|')'
newline|'\n'
nl|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_list_base_images'
op|'('
name|'base_dir'
op|')'
newline|'\n'
nl|'\n'
name|'sanitized'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'ent'
name|'in'
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|':'
newline|'\n'
indent|' '
name|'sanitized'
op|'.'
name|'append'
op|'('
name|'ent'
op|'.'
name|'replace'
op|'('
name|'base_dir'
op|'+'
string|"'/'"
op|','
string|"''"
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'sorted'
op|'('
name|'sanitized'
op|')'
op|','
name|'sorted'
op|'('
name|'images'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'expected'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'base_dir'
op|','
nl|'\n'
string|"'e97222e91fc4241f49a7f520d1dcf446751129b3'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'expected'
op|','
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|')'
newline|'\n'
nl|'\n'
name|'expected'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'base_dir'
op|','
nl|'\n'
string|"'17d1b00b81642842e514494a78e804e9a511637c_'"
nl|'\n'
string|"'10737418240'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'expected'
op|','
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|')'
newline|'\n'
nl|'\n'
name|'unexpected'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'base_dir'
op|','
string|"'00000004'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotIn'
op|'('
name|'unexpected'
op|','
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|')'
newline|'\n'
nl|'\n'
name|'for'
name|'ent'
name|'in'
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'ent'
op|'.'
name|'startswith'
op|'('
name|'base_dir'
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'len'
op|'('
name|'image_cache_manager'
op|'.'
name|'originals'
op|')'
op|','
number|'2'
op|')'
newline|'\n'
nl|'\n'
name|'expected'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'base_dir'
op|','
nl|'\n'
string|"'17d1b00b81642842e514494a78e804e9a511637c'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'expected'
op|','
name|'image_cache_manager'
op|'.'
name|'originals'
op|')'
newline|'\n'
nl|'\n'
name|'unexpected'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'base_dir'
op|','
nl|'\n'
string|"'17d1b00b81642842e514494a78e804e9a511637c_'"
nl|'\n'
string|"'10737418240'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotIn'
op|'('
name|'unexpected'
op|','
name|'image_cache_manager'
op|'.'
name|'originals'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'image_cache_manager'
op|'.'
name|'back_swap_images'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'swap_1000'"
op|','
name|'image_cache_manager'
op|'.'
name|'back_swap_images'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_list_backing_images_small
dedent|''
name|'def'
name|'test_list_backing_images_small'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.listdir'"
op|','
nl|'\n'
name|'lambda'
name|'x'
op|':'
op|'['
string|"'_base'"
op|','
string|"'instance-00000001'"
op|','
nl|'\n'
string|"'instance-00000002'"
op|','
string|"'instance-00000003'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.exists'"
op|','
nl|'\n'
name|'lambda'
name|'x'
op|':'
name|'x'
op|'.'
name|'find'
op|'('
string|"'instance-'"
op|')'
op|'!='
op|'-'
number|'1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'libvirt_utils'
op|','
string|"'get_disk_backing_file'"
op|','
nl|'\n'
name|'lambda'
name|'x'
op|':'
string|"'e97222e91fc4241f49a7f520d1dcf446751129b3_sm'"
op|')'
newline|'\n'
nl|'\n'
name|'found'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'CONF'
op|'.'
name|'instances_path'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'image_cache_subdirectory_name'
op|','
nl|'\n'
string|"'e97222e91fc4241f49a7f520d1dcf446751129b3_sm'"
op|')'
newline|'\n'
nl|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|'='
op|'['
name|'found'
op|']'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'instance_names'
op|'='
name|'self'
op|'.'
name|'stock_instance_names'
newline|'\n'
nl|'\n'
name|'inuse_images'
op|'='
name|'image_cache_manager'
op|'.'
name|'_list_backing_images'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'inuse_images'
op|','
op|'['
name|'found'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'len'
op|'('
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|')'
op|','
number|'0'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_list_backing_images_resized
dedent|''
name|'def'
name|'test_list_backing_images_resized'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.listdir'"
op|','
nl|'\n'
name|'lambda'
name|'x'
op|':'
op|'['
string|"'_base'"
op|','
string|"'instance-00000001'"
op|','
nl|'\n'
string|"'instance-00000002'"
op|','
string|"'instance-00000003'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.exists'"
op|','
nl|'\n'
name|'lambda'
name|'x'
op|':'
name|'x'
op|'.'
name|'find'
op|'('
string|"'instance-'"
op|')'
op|'!='
op|'-'
number|'1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'libvirt_utils'
op|','
string|"'get_disk_backing_file'"
op|','
nl|'\n'
name|'lambda'
name|'x'
op|':'
op|'('
string|"'e97222e91fc4241f49a7f520d1dcf446751129b3_'"
nl|'\n'
string|"'10737418240'"
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'found'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'CONF'
op|'.'
name|'instances_path'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'image_cache_subdirectory_name'
op|','
nl|'\n'
string|"'e97222e91fc4241f49a7f520d1dcf446751129b3_'"
nl|'\n'
string|"'10737418240'"
op|')'
newline|'\n'
nl|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|'='
op|'['
name|'found'
op|']'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'instance_names'
op|'='
name|'self'
op|'.'
name|'stock_instance_names'
newline|'\n'
nl|'\n'
name|'inuse_images'
op|'='
name|'image_cache_manager'
op|'.'
name|'_list_backing_images'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'inuse_images'
op|','
op|'['
name|'found'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'len'
op|'('
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|')'
op|','
number|'0'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_list_backing_images_instancename
dedent|''
name|'def'
name|'test_list_backing_images_instancename'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.listdir'"
op|','
nl|'\n'
name|'lambda'
name|'x'
op|':'
op|'['
string|"'_base'"
op|','
string|"'banana-42-hamster'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.exists'"
op|','
nl|'\n'
name|'lambda'
name|'x'
op|':'
name|'x'
op|'.'
name|'find'
op|'('
string|"'banana-42-hamster'"
op|')'
op|'!='
op|'-'
number|'1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'libvirt_utils'
op|','
string|"'get_disk_backing_file'"
op|','
nl|'\n'
name|'lambda'
name|'x'
op|':'
string|"'e97222e91fc4241f49a7f520d1dcf446751129b3_sm'"
op|')'
newline|'\n'
nl|'\n'
name|'found'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'CONF'
op|'.'
name|'instances_path'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'image_cache_subdirectory_name'
op|','
nl|'\n'
string|"'e97222e91fc4241f49a7f520d1dcf446751129b3_sm'"
op|')'
newline|'\n'
nl|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|'='
op|'['
name|'found'
op|']'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'instance_names'
op|'='
name|'self'
op|'.'
name|'stock_instance_names'
newline|'\n'
nl|'\n'
name|'inuse_images'
op|'='
name|'image_cache_manager'
op|'.'
name|'_list_backing_images'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'inuse_images'
op|','
op|'['
name|'found'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'len'
op|'('
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|')'
op|','
number|'0'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_list_backing_images_disk_notexist
dedent|''
name|'def'
name|'test_list_backing_images_disk_notexist'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.listdir'"
op|','
nl|'\n'
name|'lambda'
name|'x'
op|':'
op|'['
string|"'_base'"
op|','
string|"'banana-42-hamster'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.exists'"
op|','
nl|'\n'
name|'lambda'
name|'x'
op|':'
name|'x'
op|'.'
name|'find'
op|'('
string|"'banana-42-hamster'"
op|')'
op|'!='
op|'-'
number|'1'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_get_disk
name|'def'
name|'fake_get_disk'
op|'('
name|'disk_path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'processutils'
op|'.'
name|'ProcessExecutionError'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'libvirt_utils'
op|','
string|"'get_disk_backing_file'"
op|','
name|'fake_get_disk'
op|')'
newline|'\n'
nl|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|'='
op|'['
op|']'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'instance_names'
op|'='
name|'self'
op|'.'
name|'stock_instance_names'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'processutils'
op|'.'
name|'ProcessExecutionError'
op|','
nl|'\n'
name|'image_cache_manager'
op|'.'
name|'_list_backing_images'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_find_base_file_nothing
dedent|''
name|'def'
name|'test_find_base_file_nothing'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.exists'"
op|','
name|'lambda'
name|'x'
op|':'
name|'False'
op|')'
newline|'\n'
nl|'\n'
name|'base_dir'
op|'='
string|"'/var/lib/nova/instances/_base'"
newline|'\n'
name|'fingerprint'
op|'='
string|"'549867354867'"
newline|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'res'
op|'='
name|'list'
op|'('
name|'image_cache_manager'
op|'.'
name|'_find_base_file'
op|'('
name|'base_dir'
op|','
name|'fingerprint'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'0'
op|','
name|'len'
op|'('
name|'res'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_find_base_file_small
dedent|''
name|'def'
name|'test_find_base_file_small'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fingerprint'
op|'='
string|"'968dd6cc49e01aaa044ed11c0cce733e0fa44a6a'"
newline|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.exists'"
op|','
nl|'\n'
name|'lambda'
name|'x'
op|':'
name|'x'
op|'.'
name|'endswith'
op|'('
string|"'%s_sm'"
op|'%'
name|'fingerprint'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'base_dir'
op|'='
string|"'/var/lib/nova/instances/_base'"
newline|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'res'
op|'='
name|'list'
op|'('
name|'image_cache_manager'
op|'.'
name|'_find_base_file'
op|'('
name|'base_dir'
op|','
name|'fingerprint'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'base_file'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'base_dir'
op|','
name|'fingerprint'
op|'+'
string|"'_sm'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'res'
op|','
op|'['
op|'('
name|'base_file'
op|','
name|'True'
op|','
name|'False'
op|')'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_find_base_file_resized
dedent|''
name|'def'
name|'test_find_base_file_resized'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fingerprint'
op|'='
string|"'968dd6cc49e01aaa044ed11c0cce733e0fa44a6a'"
newline|'\n'
name|'listing'
op|'='
op|'['
string|"'00000001'"
op|','
nl|'\n'
string|"'ephemeral_0_20_None'"
op|','
nl|'\n'
string|"'968dd6cc49e01aaa044ed11c0cce733e0fa44a6a_10737418240'"
op|','
nl|'\n'
string|"'00000004'"
op|']'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.listdir'"
op|','
name|'lambda'
name|'x'
op|':'
name|'listing'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.exists'"
op|','
nl|'\n'
name|'lambda'
name|'x'
op|':'
name|'x'
op|'.'
name|'endswith'
op|'('
string|"'%s_10737418240'"
op|'%'
name|'fingerprint'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.isfile'"
op|','
name|'lambda'
name|'x'
op|':'
name|'True'
op|')'
newline|'\n'
nl|'\n'
name|'base_dir'
op|'='
string|"'/var/lib/nova/instances/_base'"
newline|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_list_base_images'
op|'('
name|'base_dir'
op|')'
newline|'\n'
name|'res'
op|'='
name|'list'
op|'('
name|'image_cache_manager'
op|'.'
name|'_find_base_file'
op|'('
name|'base_dir'
op|','
name|'fingerprint'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'base_file'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'base_dir'
op|','
name|'fingerprint'
op|'+'
string|"'_10737418240'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'res'
op|','
op|'['
op|'('
name|'base_file'
op|','
name|'False'
op|','
name|'True'
op|')'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_find_base_file_all
dedent|''
name|'def'
name|'test_find_base_file_all'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fingerprint'
op|'='
string|"'968dd6cc49e01aaa044ed11c0cce733e0fa44a6a'"
newline|'\n'
name|'listing'
op|'='
op|'['
string|"'00000001'"
op|','
nl|'\n'
string|"'ephemeral_0_20_None'"
op|','
nl|'\n'
string|"'968dd6cc49e01aaa044ed11c0cce733e0fa44a6a_sm'"
op|','
nl|'\n'
string|"'968dd6cc49e01aaa044ed11c0cce733e0fa44a6a_10737418240'"
op|','
nl|'\n'
string|"'00000004'"
op|']'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.listdir'"
op|','
name|'lambda'
name|'x'
op|':'
name|'listing'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.exists'"
op|','
name|'lambda'
name|'x'
op|':'
name|'True'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.isfile'"
op|','
name|'lambda'
name|'x'
op|':'
name|'True'
op|')'
newline|'\n'
nl|'\n'
name|'base_dir'
op|'='
string|"'/var/lib/nova/instances/_base'"
newline|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_list_base_images'
op|'('
name|'base_dir'
op|')'
newline|'\n'
name|'res'
op|'='
name|'list'
op|'('
name|'image_cache_manager'
op|'.'
name|'_find_base_file'
op|'('
name|'base_dir'
op|','
name|'fingerprint'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'base_file1'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'base_dir'
op|','
name|'fingerprint'
op|')'
newline|'\n'
name|'base_file2'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'base_dir'
op|','
name|'fingerprint'
op|'+'
string|"'_sm'"
op|')'
newline|'\n'
name|'base_file3'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'base_dir'
op|','
name|'fingerprint'
op|'+'
string|"'_10737418240'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'res'
op|','
op|'['
op|'('
name|'base_file1'
op|','
name|'False'
op|','
name|'False'
op|')'
op|','
nl|'\n'
op|'('
name|'base_file2'
op|','
name|'True'
op|','
name|'False'
op|')'
op|','
nl|'\n'
op|'('
name|'base_file3'
op|','
name|'False'
op|','
name|'True'
op|')'
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'contextlib'
op|'.'
name|'contextmanager'
newline|'\n'
DECL|member|_make_base_file
name|'def'
name|'_make_base_file'
op|'('
name|'self'
op|','
name|'checksum'
op|'='
name|'True'
op|','
name|'lock'
op|'='
name|'True'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Make a base file for testing."""'
newline|'\n'
nl|'\n'
name|'with'
name|'utils'
op|'.'
name|'tempdir'
op|'('
op|')'
name|'as'
name|'tmpdir'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'instances_path'
op|'='
name|'tmpdir'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'image_info_filename_pattern'
op|'='
op|'('
string|"'$instances_path/'"
nl|'\n'
string|"'%(image)s.info'"
op|')'
op|','
nl|'\n'
name|'group'
op|'='
string|"'libvirt'"
op|')'
newline|'\n'
name|'fname'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'tmpdir'
op|','
string|"'aaa'"
op|')'
newline|'\n'
nl|'\n'
name|'base_file'
op|'='
name|'open'
op|'('
name|'fname'
op|','
string|"'w'"
op|')'
newline|'\n'
name|'base_file'
op|'.'
name|'write'
op|'('
string|"'data'"
op|')'
newline|'\n'
name|'base_file'
op|'.'
name|'close'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'lock'
op|':'
newline|'\n'
indent|' '
name|'lockdir'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'tmpdir'
op|','
string|"'locks'"
op|')'
newline|'\n'
name|'lockname'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'lockdir'
op|','
string|"'nova-aaa'"
op|')'
newline|'\n'
name|'os'
op|'.'
name|'mkdir'
op|'('
name|'lockdir'
op|')'
newline|'\n'
name|'lock_file'
op|'='
name|'open'
op|'('
name|'lockname'
op|','
string|"'w'"
op|')'
newline|'\n'
name|'lock_file'
op|'.'
name|'write'
op|'('
string|"'data'"
op|')'
newline|'\n'
name|'lock_file'
op|'.'
name|'close'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'base_file'
op|'='
name|'open'
op|'('
name|'fname'
op|','
string|"'r'"
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'checksum'
op|':'
newline|'\n'
indent|' '
name|'imagecache'
op|'.'
name|'write_stored_checksum'
op|'('
name|'fname'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'base_file'
op|'.'
name|'close'
op|'('
op|')'
newline|'\n'
name|'yield'
name|'fname'
newline|'\n'
nl|'\n'
DECL|member|test_remove_base_file
dedent|''
dedent|''
name|'def'
name|'test_remove_base_file'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'self'
op|'.'
name|'_make_base_file'
op|'('
op|')'
name|'as'
name|'fname'
op|':'
newline|'\n'
indent|' '
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_remove_base_file'
op|'('
name|'fname'
op|')'
newline|'\n'
name|'info_fname'
op|'='
name|'imagecache'
op|'.'
name|'get_info_filename'
op|'('
name|'fname'
op|')'
newline|'\n'
nl|'\n'
name|'lock_name'
op|'='
string|"'nova-'"
op|'+'
name|'os'
op|'.'
name|'path'
op|'.'
name|'split'
op|'('
name|'fname'
op|')'
op|'['
op|'-'
number|'1'
op|']'
newline|'\n'
name|'lock_dir'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'CONF'
op|'.'
name|'instances_path'
op|','
string|"'locks'"
op|')'
newline|'\n'
name|'lock_file'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'lock_dir'
op|','
name|'lock_name'
op|')'
newline|'\n'
nl|'\n'
comment|'# Files are initially too new to delete'
nl|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'fname'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'info_fname'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'lock_file'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|'# Old files get cleaned up though'
nl|'\n'
name|'os'
op|'.'
name|'utime'
op|'('
name|'fname'
op|','
op|'('
op|'-'
number|'1'
op|','
name|'time'
op|'.'
name|'time'
op|'('
op|')'
op|'-'
number|'3601'
op|')'
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_remove_base_file'
op|'('
name|'fname'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'fname'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'info_fname'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'lock_file'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_remove_base_file_original
dedent|''
dedent|''
name|'def'
name|'test_remove_base_file_original'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'self'
op|'.'
name|'_make_base_file'
op|'('
op|')'
name|'as'
name|'fname'
op|':'
newline|'\n'
indent|' '
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'originals'
op|'='
op|'['
name|'fname'
op|']'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_remove_base_file'
op|'('
name|'fname'
op|')'
newline|'\n'
name|'info_fname'
op|'='
name|'imagecache'
op|'.'
name|'get_info_filename'
op|'('
name|'fname'
op|')'
newline|'\n'
nl|'\n'
comment|'# Files are initially too new to delete'
nl|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'fname'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'info_fname'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|'# This file should stay longer than a resized image'
nl|'\n'
name|'os'
op|'.'
name|'utime'
op|'('
name|'fname'
op|','
op|'('
op|'-'
number|'1'
op|','
name|'time'
op|'.'
name|'time'
op|'('
op|')'
op|'-'
number|'3601'
op|')'
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_remove_base_file'
op|'('
name|'fname'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'fname'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'info_fname'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|"# Originals don't stay forever though"
nl|'\n'
name|'os'
op|'.'
name|'utime'
op|'('
name|'fname'
op|','
op|'('
op|'-'
number|'1'
op|','
name|'time'
op|'.'
name|'time'
op|'('
op|')'
op|'-'
number|'3600'
op|'*'
number|'25'
op|')'
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_remove_base_file'
op|'('
name|'fname'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'fname'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'info_fname'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_remove_base_file_dne
dedent|''
dedent|''
name|'def'
name|'test_remove_base_file_dne'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# This test is solely to execute the "does not exist" code path. We'
nl|'\n'
comment|"# don't expect the method being tested to do anything in this case."
nl|'\n'
indent|' '
name|'with'
name|'utils'
op|'.'
name|'tempdir'
op|'('
op|')'
name|'as'
name|'tmpdir'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'instances_path'
op|'='
name|'tmpdir'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'image_info_filename_pattern'
op|'='
op|'('
string|"'$instances_path/'"
nl|'\n'
string|"'%(image)s.info'"
op|')'
op|','
nl|'\n'
name|'group'
op|'='
string|"'libvirt'"
op|')'
newline|'\n'
nl|'\n'
name|'fname'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'tmpdir'
op|','
string|"'aaa'"
op|')'
newline|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_remove_base_file'
op|'('
name|'fname'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_remove_base_file_oserror
dedent|''
dedent|''
name|'def'
name|'test_remove_base_file_oserror'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'intercept_log_messages'
op|'('
op|')'
name|'as'
name|'stream'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'utils'
op|'.'
name|'tempdir'
op|'('
op|')'
name|'as'
name|'tmpdir'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'instances_path'
op|'='
name|'tmpdir'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'image_info_filename_pattern'
op|'='
op|'('
string|"'$instances_path/'"
nl|'\n'
string|"'%(image)s.info'"
op|')'
op|','
nl|'\n'
name|'group'
op|'='
string|"'libvirt'"
op|')'
newline|'\n'
nl|'\n'
name|'fname'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'tmpdir'
op|','
string|"'aaa'"
op|')'
newline|'\n'
nl|'\n'
name|'os'
op|'.'
name|'mkdir'
op|'('
name|'fname'
op|')'
newline|'\n'
name|'os'
op|'.'
name|'utime'
op|'('
name|'fname'
op|','
op|'('
op|'-'
number|'1'
op|','
name|'time'
op|'.'
name|'time'
op|'('
op|')'
op|'-'
number|'3601'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|'# This will raise an OSError because of file permissions'
nl|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_remove_base_file'
op|'('
name|'fname'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'fname'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotEqual'
op|'('
name|'stream'
op|'.'
name|'getvalue'
op|'('
op|')'
op|'.'
name|'find'
op|'('
string|"'Failed to remove'"
op|')'
op|','
nl|'\n'
op|'-'
number|'1'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_handle_base_image_unused
dedent|''
dedent|''
dedent|''
name|'def'
name|'test_handle_base_image_unused'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'img'
op|'='
string|"'123'"
newline|'\n'
nl|'\n'
name|'with'
name|'self'
op|'.'
name|'_make_base_file'
op|'('
op|')'
name|'as'
name|'fname'
op|':'
newline|'\n'
indent|' '
name|'os'
op|'.'
name|'utime'
op|'('
name|'fname'
op|','
op|'('
op|'-'
number|'1'
op|','
name|'time'
op|'.'
name|'time'
op|'('
op|')'
op|'-'
number|'3601'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|'='
op|'['
name|'fname'
op|']'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_handle_base_image'
op|'('
name|'img'
op|','
name|'fname'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'removable_base_files'
op|','
nl|'\n'
op|'['
name|'fname'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'corrupt_base_files'
op|','
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'libvirt_utils'
op|','
string|"'update_mtime'"
op|')'
newline|'\n'
DECL|member|test_handle_base_image_used
name|'def'
name|'test_handle_base_image_used'
op|'('
name|'self'
op|','
name|'mock_mtime'
op|')'
op|':'
newline|'\n'
indent|' '
name|'img'
op|'='
string|"'123'"
newline|'\n'
nl|'\n'
name|'with'
name|'self'
op|'.'
name|'_make_base_file'
op|'('
op|')'
name|'as'
name|'fname'
op|':'
newline|'\n'
indent|' '
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|'='
op|'['
name|'fname'
op|']'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'used_images'
op|'='
op|'{'
string|"'123'"
op|':'
op|'('
number|'1'
op|','
number|'0'
op|','
op|'['
string|"'banana-42'"
op|']'
op|')'
op|'}'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_handle_base_image'
op|'('
name|'img'
op|','
name|'fname'
op|')'
newline|'\n'
nl|'\n'
name|'mock_mtime'
op|'.'
name|'assert_called_once_with'
op|'('
name|'fname'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'removable_base_files'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'corrupt_base_files'
op|','
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'libvirt_utils'
op|','
string|"'update_mtime'"
op|')'
newline|'\n'
DECL|member|test_handle_base_image_used_remotely
name|'def'
name|'test_handle_base_image_used_remotely'
op|'('
name|'self'
op|','
name|'mock_mtime'
op|')'
op|':'
newline|'\n'
indent|' '
name|'img'
op|'='
string|"'123'"
newline|'\n'
nl|'\n'
name|'with'
name|'self'
op|'.'
name|'_make_base_file'
op|'('
op|')'
name|'as'
name|'fname'
op|':'
newline|'\n'
indent|' '
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|'='
op|'['
name|'fname'
op|']'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'used_images'
op|'='
op|'{'
string|"'123'"
op|':'
op|'('
number|'0'
op|','
number|'1'
op|','
op|'['
string|"'banana-42'"
op|']'
op|')'
op|'}'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_handle_base_image'
op|'('
name|'img'
op|','
name|'fname'
op|')'
newline|'\n'
nl|'\n'
name|'mock_mtime'
op|'.'
name|'assert_called_once_with'
op|'('
name|'fname'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'removable_base_files'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'corrupt_base_files'
op|','
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_handle_base_image_absent
dedent|''
dedent|''
name|'def'
name|'test_handle_base_image_absent'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'img'
op|'='
string|"'123'"
newline|'\n'
nl|'\n'
name|'with'
name|'intercept_log_messages'
op|'('
op|')'
name|'as'
name|'stream'
op|':'
newline|'\n'
indent|' '
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'used_images'
op|'='
op|'{'
string|"'123'"
op|':'
op|'('
number|'1'
op|','
number|'0'
op|','
op|'['
string|"'banana-42'"
op|']'
op|')'
op|'}'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_handle_base_image'
op|'('
name|'img'
op|','
name|'None'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'removable_base_files'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'corrupt_base_files'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotEqual'
op|'('
name|'stream'
op|'.'
name|'getvalue'
op|'('
op|')'
op|'.'
name|'find'
op|'('
string|"'an absent base file'"
op|')'
op|','
nl|'\n'
op|'-'
number|'1'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_handle_base_image_used_missing
dedent|''
dedent|''
name|'def'
name|'test_handle_base_image_used_missing'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'img'
op|'='
string|"'123'"
newline|'\n'
nl|'\n'
name|'with'
name|'utils'
op|'.'
name|'tempdir'
op|'('
op|')'
name|'as'
name|'tmpdir'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'instances_path'
op|'='
name|'tmpdir'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'image_info_filename_pattern'
op|'='
op|'('
string|"'$instances_path/'"
nl|'\n'
string|"'%(image)s.info'"
op|')'
op|','
nl|'\n'
name|'group'
op|'='
string|"'libvirt'"
op|')'
newline|'\n'
nl|'\n'
name|'fname'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'tmpdir'
op|','
string|"'aaa'"
op|')'
newline|'\n'
nl|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|'='
op|'['
name|'fname'
op|']'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'used_images'
op|'='
op|'{'
string|"'123'"
op|':'
op|'('
number|'1'
op|','
number|'0'
op|','
op|'['
string|"'banana-42'"
op|']'
op|')'
op|'}'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_handle_base_image'
op|'('
name|'img'
op|','
name|'fname'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'removable_base_files'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'corrupt_base_files'
op|','
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'libvirt_utils'
op|','
string|"'update_mtime'"
op|')'
newline|'\n'
DECL|member|test_handle_base_image_checksum_fails
name|'def'
name|'test_handle_base_image_checksum_fails'
op|'('
name|'self'
op|','
name|'mock_mtime'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'checksum_base_images'
op|'='
name|'True'
op|','
name|'group'
op|'='
string|"'libvirt'"
op|')'
newline|'\n'
nl|'\n'
name|'img'
op|'='
string|"'123'"
newline|'\n'
nl|'\n'
name|'with'
name|'self'
op|'.'
name|'_make_base_file'
op|'('
op|')'
name|'as'
name|'fname'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'open'
op|'('
name|'fname'
op|','
string|"'w'"
op|')'
name|'as'
name|'f'
op|':'
newline|'\n'
indent|' '
name|'f'
op|'.'
name|'write'
op|'('
string|"'banana'"
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'d'
op|'='
op|'{'
string|"'sha1'"
op|':'
string|"'21323454'"
op|'}'
newline|'\n'
name|'with'
name|'open'
op|'('
string|"'%s.info'"
op|'%'
name|'fname'
op|','
string|"'w'"
op|')'
name|'as'
name|'f'
op|':'
newline|'\n'
indent|' '
name|'f'
op|'.'
name|'write'
op|'('
name|'jsonutils'
op|'.'
name|'dumps'
op|'('
name|'d'
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|'='
op|'['
name|'fname'
op|']'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'used_images'
op|'='
op|'{'
string|"'123'"
op|':'
op|'('
number|'1'
op|','
number|'0'
op|','
op|'['
string|"'banana-42'"
op|']'
op|')'
op|'}'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_handle_base_image'
op|'('
name|'img'
op|','
name|'fname'
op|')'
newline|'\n'
nl|'\n'
name|'mock_mtime'
op|'.'
name|'assert_called_once_with'
op|'('
name|'fname'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'unexplained_images'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'removable_base_files'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'corrupt_base_files'
op|','
nl|'\n'
op|'['
name|'fname'
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'libvirt_utils'
op|','
string|"'update_mtime'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'lockutils'
op|','
string|"'external_lock'"
op|')'
newline|'\n'
DECL|member|test_verify_base_images
name|'def'
name|'test_verify_base_images'
op|'('
name|'self'
op|','
name|'mock_lock'
op|','
name|'mock_mtime'
op|')'
op|':'
newline|'\n'
indent|' '
name|'hashed_1'
op|'='
string|"'356a192b7913b04c54574d18c28d46e6395428ab'"
newline|'\n'
name|'hashed_21'
op|'='
string|"'472b07b9fcf2c2451e8781e944bf5f77cd8457c8'"
newline|'\n'
name|'hashed_22'
op|'='
string|"'12c6fc06c99a462375eeb3f43dfd832b08ca9e17'"
newline|'\n'
name|'hashed_42'
op|'='
string|"'92cfceb39d57d914ed8b14d0e37643de0797ae56'"
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'instances_path'
op|'='
string|"'/instance_path'"
op|','
nl|'\n'
name|'image_cache_subdirectory_name'
op|'='
string|"'_base'"
op|')'
newline|'\n'
nl|'\n'
name|'base_file_list'
op|'='
op|'['
string|"'00000001'"
op|','
nl|'\n'
string|"'ephemeral_0_20_None'"
op|','
nl|'\n'
string|"'e97222e91fc4241f49a7f520d1dcf446751129b3_sm'"
op|','
nl|'\n'
string|"'e09c675c2d1cfac32dae3c2d83689c8c94bc693b_sm'"
op|','
nl|'\n'
name|'hashed_42'
op|','
nl|'\n'
name|'hashed_1'
op|','
nl|'\n'
name|'hashed_21'
op|','
nl|'\n'
name|'hashed_22'
op|','
nl|'\n'
string|"'%s_5368709120'"
op|'%'
name|'hashed_1'
op|','
nl|'\n'
string|"'%s_10737418240'"
op|'%'
name|'hashed_1'
op|','
nl|'\n'
string|"'00000004'"
op|']'
newline|'\n'
nl|'\n'
DECL|function|fq_path
name|'def'
name|'fq_path'
op|'('
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
string|"'/instance_path/_base/'"
op|','
name|'path'
op|')'
newline|'\n'
nl|'\n'
comment|'# Fake base directory existence'
nl|'\n'
dedent|''
name|'orig_exists'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
newline|'\n'
nl|'\n'
DECL|function|exists
name|'def'
name|'exists'
op|'('
name|'path'
op|')'
op|':'
newline|'\n'
comment|'# The python coverage tool got angry with my overly broad mocks'
nl|'\n'
indent|' '
name|'if'
name|'not'
name|'path'
op|'.'
name|'startswith'
op|'('
string|"'/instance_path'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'orig_exists'
op|'('
name|'path'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'path'
name|'in'
op|'['
string|"'/instance_path'"
op|','
nl|'\n'
string|"'/instance_path/_base'"
op|','
nl|'\n'
string|"'/instance_path/instance-1/disk'"
op|','
nl|'\n'
string|"'/instance_path/instance-2/disk'"
op|','
nl|'\n'
string|"'/instance_path/instance-3/disk'"
op|','
nl|'\n'
string|"'/instance_path/_base/%s.info'"
op|'%'
name|'hashed_42'
op|']'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'True'
newline|'\n'
nl|'\n'
dedent|''
name|'for'
name|'p'
name|'in'
name|'base_file_list'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'path'
op|'=='
name|'fq_path'
op|'('
name|'p'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'True'
newline|'\n'
dedent|''
name|'if'
name|'path'
op|'=='
name|'fq_path'
op|'('
name|'p'
op|')'
op|'+'
string|"'.info'"
op|':'
newline|'\n'
indent|' '
name|'return'
name|'False'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'if'
name|'path'
name|'in'
op|'['
string|"'/instance_path/_base/%s_sm'"
op|'%'
name|'i'
name|'for'
name|'i'
name|'in'
op|'['
name|'hashed_1'
op|','
nl|'\n'
name|'hashed_21'
op|','
nl|'\n'
name|'hashed_22'
op|','
nl|'\n'
name|'hashed_42'
op|']'
op|']'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'False'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'fail'
op|'('
string|"'Unexpected path existence check: %s'"
op|'%'
name|'path'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.exists'"
op|','
name|'lambda'
name|'x'
op|':'
name|'exists'
op|'('
name|'x'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|'# Fake up some instances in the instances directory'
nl|'\n'
name|'orig_listdir'
op|'='
name|'os'
op|'.'
name|'listdir'
newline|'\n'
nl|'\n'
DECL|function|listdir
name|'def'
name|'listdir'
op|'('
name|'path'
op|')'
op|':'
newline|'\n'
comment|'# The python coverage tool got angry with my overly broad mocks'
nl|'\n'
indent|' '
name|'if'
name|'not'
name|'path'
op|'.'
name|'startswith'
op|'('
string|"'/instance_path'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'orig_listdir'
op|'('
name|'path'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'path'
op|'=='
string|"'/instance_path'"
op|':'
newline|'\n'
indent|' '
name|'return'
op|'['
string|"'instance-1'"
op|','
string|"'instance-2'"
op|','
string|"'instance-3'"
op|','
string|"'_base'"
op|']'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'path'
op|'=='
string|"'/instance_path/_base'"
op|':'
newline|'\n'
indent|' '
name|'return'
name|'base_file_list'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'fail'
op|'('
string|"'Unexpected directory listed: %s'"
op|'%'
name|'path'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.listdir'"
op|','
name|'lambda'
name|'x'
op|':'
name|'listdir'
op|'('
name|'x'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|'# Fake isfile for these faked images in _base'
nl|'\n'
name|'orig_isfile'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'isfile'
newline|'\n'
nl|'\n'
DECL|function|isfile
name|'def'
name|'isfile'
op|'('
name|'path'
op|')'
op|':'
newline|'\n'
comment|'# The python coverage tool got angry with my overly broad mocks'
nl|'\n'
indent|' '
name|'if'
name|'not'
name|'path'
op|'.'
name|'startswith'
op|'('
string|"'/instance_path'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'orig_isfile'
op|'('
name|'path'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'for'
name|'p'
name|'in'
name|'base_file_list'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'path'
op|'=='
name|'fq_path'
op|'('
name|'p'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'True'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'self'
op|'.'
name|'fail'
op|'('
string|"'Unexpected isfile call: %s'"
op|'%'
name|'path'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.isfile'"
op|','
name|'lambda'
name|'x'
op|':'
name|'isfile'
op|'('
name|'x'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|'# Fake the database call which lists running instances'
nl|'\n'
name|'instances'
op|'='
op|'['
op|'{'
string|"'image_ref'"
op|':'
string|"'1'"
op|','
nl|'\n'
string|"'host'"
op|':'
name|'CONF'
op|'.'
name|'host'
op|','
nl|'\n'
string|"'name'"
op|':'
string|"'instance-1'"
op|','
nl|'\n'
string|"'uuid'"
op|':'
string|"'123'"
op|','
nl|'\n'
string|"'vm_state'"
op|':'
string|"''"
op|','
nl|'\n'
string|"'task_state'"
op|':'
string|"''"
op|'}'
op|','
nl|'\n'
op|'{'
string|"'image_ref'"
op|':'
string|"'1'"
op|','
nl|'\n'
string|"'kernel_id'"
op|':'
string|"'21'"
op|','
nl|'\n'
string|"'ramdisk_id'"
op|':'
string|"'22'"
op|','
nl|'\n'
string|"'host'"
op|':'
name|'CONF'
op|'.'
name|'host'
op|','
nl|'\n'
string|"'name'"
op|':'
string|"'instance-2'"
op|','
nl|'\n'
string|"'uuid'"
op|':'
string|"'456'"
op|','
nl|'\n'
string|"'vm_state'"
op|':'
string|"''"
op|','
nl|'\n'
string|"'task_state'"
op|':'
string|"''"
op|'}'
op|']'
newline|'\n'
name|'all_instances'
op|'='
op|'['
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'None'
op|','
op|'**'
name|'instance'
op|')'
nl|'\n'
name|'for'
name|'instance'
name|'in'
name|'instances'
op|']'
newline|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
nl|'\n'
comment|'# Fake the utils call which finds the backing image'
nl|'\n'
DECL|function|get_disk_backing_file
name|'def'
name|'get_disk_backing_file'
op|'('
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'path'
name|'in'
op|'['
string|"'/instance_path/instance-1/disk'"
op|','
nl|'\n'
string|"'/instance_path/instance-2/disk'"
op|']'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'fq_path'
op|'('
string|"'%s_5368709120'"
op|'%'
name|'hashed_1'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'fail'
op|'('
string|"'Unexpected backing file lookup: %s'"
op|'%'
name|'path'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'libvirt_utils'
op|','
string|"'get_disk_backing_file'"
op|','
nl|'\n'
name|'lambda'
name|'x'
op|':'
name|'get_disk_backing_file'
op|'('
name|'x'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|'# Fake out verifying checksums, as that is tested elsewhere'
nl|'\n'
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'image_cache_manager'
op|','
string|"'_verify_checksum'"
op|','
nl|'\n'
name|'lambda'
name|'x'
op|','
name|'y'
op|':'
name|'True'
op|')'
newline|'\n'
nl|'\n'
comment|'# Fake getmtime as well'
nl|'\n'
name|'orig_getmtime'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'getmtime'
newline|'\n'
nl|'\n'
DECL|function|getmtime
name|'def'
name|'getmtime'
op|'('
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'path'
op|'.'
name|'startswith'
op|'('
string|"'/instance_path'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'orig_getmtime'
op|'('
name|'path'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'return'
number|'1000000'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.getmtime'"
op|','
name|'lambda'
name|'x'
op|':'
name|'getmtime'
op|'('
name|'x'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|"# Make sure we don't accidentally remove a real file"
nl|'\n'
name|'orig_remove'
op|'='
name|'os'
op|'.'
name|'remove'
newline|'\n'
nl|'\n'
DECL|function|remove
name|'def'
name|'remove'
op|'('
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'path'
op|'.'
name|'startswith'
op|'('
string|"'/instance_path'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'orig_remove'
op|'('
name|'path'
op|')'
newline|'\n'
nl|'\n'
comment|"# Don't try to remove fake files"
nl|'\n'
dedent|''
name|'return'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.remove'"
op|','
name|'lambda'
name|'x'
op|':'
name|'remove'
op|'('
name|'x'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'objects'
op|'.'
name|'block_device'
op|'.'
name|'BlockDeviceMappingList'
op|','
nl|'\n'
string|"'bdms_by_instance_uuid'"
op|')'
newline|'\n'
nl|'\n'
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'get_admin_context'
op|'('
op|')'
newline|'\n'
name|'objects'
op|'.'
name|'block_device'
op|'.'
name|'BlockDeviceMappingList'
op|'.'
name|'bdms_by_instance_uuid'
op|'('
nl|'\n'
name|'ctxt'
op|','
op|'['
string|"'123'"
op|','
string|"'456'"
op|']'
op|')'
op|'.'
name|'AndReturn'
op|'('
op|'{'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
comment|"# And finally we can make the call we're actually testing..."
nl|'\n'
comment|'# The argument here should be a context, but it is mocked out'
nl|'\n'
name|'image_cache_manager'
op|'.'
name|'update'
op|'('
name|'ctxt'
op|','
name|'all_instances'
op|')'
newline|'\n'
nl|'\n'
comment|'# Verify'
nl|'\n'
name|'active'
op|'='
op|'['
name|'fq_path'
op|'('
name|'hashed_1'
op|')'
op|','
name|'fq_path'
op|'('
string|"'%s_5368709120'"
op|'%'
name|'hashed_1'
op|')'
op|','
nl|'\n'
name|'fq_path'
op|'('
name|'hashed_21'
op|')'
op|','
name|'fq_path'
op|'('
name|'hashed_22'
op|')'
op|']'
newline|'\n'
name|'for'
name|'act'
name|'in'
name|'active'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'act'
op|','
name|'image_cache_manager'
op|'.'
name|'active_base_files'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'len'
op|'('
name|'image_cache_manager'
op|'.'
name|'active_base_files'
op|')'
op|','
nl|'\n'
name|'len'
op|'('
name|'active'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'for'
name|'rem'
name|'in'
op|'['
name|'fq_path'
op|'('
string|"'e97222e91fc4241f49a7f520d1dcf446751129b3_sm'"
op|')'
op|','
nl|'\n'
name|'fq_path'
op|'('
string|"'e09c675c2d1cfac32dae3c2d83689c8c94bc693b_sm'"
op|')'
op|','
nl|'\n'
name|'fq_path'
op|'('
name|'hashed_42'
op|')'
op|','
nl|'\n'
name|'fq_path'
op|'('
string|"'%s_10737418240'"
op|'%'
name|'hashed_1'
op|')'
op|']'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'rem'
op|','
name|'image_cache_manager'
op|'.'
name|'removable_base_files'
op|')'
newline|'\n'
nl|'\n'
comment|'# Ensure there are no "corrupt" images as well'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'len'
op|'('
name|'image_cache_manager'
op|'.'
name|'corrupt_base_files'
op|')'
op|','
number|'0'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_verify_base_images_no_base
dedent|''
name|'def'
name|'test_verify_base_images_no_base'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'instances_path'
op|'='
string|"'/tmp/no/such/dir/name/please'"
op|')'
newline|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'update'
op|'('
name|'None'
op|','
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_is_valid_info_file
dedent|''
name|'def'
name|'test_is_valid_info_file'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'hashed'
op|'='
string|"'e97222e91fc4241f49a7f520d1dcf446751129b3'"
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'instances_path'
op|'='
string|"'/tmp/no/such/dir/name/please'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'image_info_filename_pattern'
op|'='
op|'('
string|"'$instances_path/_base/'"
nl|'\n'
string|"'%(image)s.info'"
op|')'
op|','
nl|'\n'
name|'group'
op|'='
string|"'libvirt'"
op|')'
newline|'\n'
name|'base_filename'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'CONF'
op|'.'
name|'instances_path'
op|','
string|"'_base'"
op|','
name|'hashed'
op|')'
newline|'\n'
nl|'\n'
name|'is_valid_info_file'
op|'='
name|'imagecache'
op|'.'
name|'is_valid_info_file'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'is_valid_info_file'
op|'('
string|"'banana'"
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'is_valid_info_file'
op|'('
nl|'\n'
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'CONF'
op|'.'
name|'instances_path'
op|','
string|"'_base'"
op|','
string|"'00000001'"
op|')'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'is_valid_info_file'
op|'('
name|'base_filename'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'is_valid_info_file'
op|'('
name|'base_filename'
op|'+'
string|"'.sha1'"
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'is_valid_info_file'
op|'('
name|'base_filename'
op|'+'
string|"'.info'"
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_configured_checksum_path
dedent|''
name|'def'
name|'test_configured_checksum_path'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'utils'
op|'.'
name|'tempdir'
op|'('
op|')'
name|'as'
name|'tmpdir'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'instances_path'
op|'='
name|'tmpdir'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'image_info_filename_pattern'
op|'='
op|'('
string|"'$instances_path/'"
nl|'\n'
string|"'%(image)s.info'"
op|')'
op|','
nl|'\n'
name|'group'
op|'='
string|"'libvirt'"
op|')'
newline|'\n'
nl|'\n'
comment|'# Ensure there is a base directory'
nl|'\n'
name|'os'
op|'.'
name|'mkdir'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'tmpdir'
op|','
string|"'_base'"
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|'# Fake the database call which lists running instances'
nl|'\n'
name|'instances'
op|'='
op|'['
op|'{'
string|"'image_ref'"
op|':'
string|"'1'"
op|','
nl|'\n'
string|"'host'"
op|':'
name|'CONF'
op|'.'
name|'host'
op|','
nl|'\n'
string|"'name'"
op|':'
string|"'instance-1'"
op|','
nl|'\n'
string|"'uuid'"
op|':'
string|"'123'"
op|','
nl|'\n'
string|"'vm_state'"
op|':'
string|"''"
op|','
nl|'\n'
string|"'task_state'"
op|':'
string|"''"
op|'}'
op|','
nl|'\n'
op|'{'
string|"'image_ref'"
op|':'
string|"'1'"
op|','
nl|'\n'
string|"'host'"
op|':'
name|'CONF'
op|'.'
name|'host'
op|','
nl|'\n'
string|"'name'"
op|':'
string|"'instance-2'"
op|','
nl|'\n'
string|"'uuid'"
op|':'
string|"'456'"
op|','
nl|'\n'
string|"'vm_state'"
op|':'
string|"''"
op|','
nl|'\n'
string|"'task_state'"
op|':'
string|"''"
op|'}'
op|']'
newline|'\n'
nl|'\n'
name|'all_instances'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'instance'
name|'in'
name|'instances'
op|':'
newline|'\n'
indent|' '
name|'all_instances'
op|'.'
name|'append'
op|'('
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
nl|'\n'
name|'None'
op|','
op|'**'
name|'instance'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|function|touch
dedent|''
name|'def'
name|'touch'
op|'('
name|'filename'
op|')'
op|':'
newline|'\n'
indent|' '
name|'f'
op|'='
name|'open'
op|'('
name|'filename'
op|','
string|"'w'"
op|')'
newline|'\n'
name|'f'
op|'.'
name|'write'
op|'('
string|"'Touched'"
op|')'
newline|'\n'
name|'f'
op|'.'
name|'close'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'old'
op|'='
name|'time'
op|'.'
name|'time'
op|'('
op|')'
op|'-'
op|'('
number|'25'
op|'*'
number|'3600'
op|')'
newline|'\n'
name|'hashed'
op|'='
string|"'e97222e91fc4241f49a7f520d1dcf446751129b3'"
newline|'\n'
name|'base_filename'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'tmpdir'
op|','
name|'hashed'
op|')'
newline|'\n'
name|'touch'
op|'('
name|'base_filename'
op|')'
newline|'\n'
name|'touch'
op|'('
name|'base_filename'
op|'+'
string|"'.info'"
op|')'
newline|'\n'
name|'os'
op|'.'
name|'utime'
op|'('
name|'base_filename'
op|'+'
string|"'.info'"
op|','
op|'('
name|'old'
op|','
name|'old'
op|')'
op|')'
newline|'\n'
name|'touch'
op|'('
name|'base_filename'
op|'+'
string|"'.info'"
op|')'
newline|'\n'
name|'os'
op|'.'
name|'utime'
op|'('
name|'base_filename'
op|'+'
string|"'.info'"
op|','
op|'('
name|'old'
op|','
name|'old'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
nl|'\n'
name|'objects'
op|'.'
name|'block_device'
op|'.'
name|'BlockDeviceMappingList'
op|','
nl|'\n'
string|"'bdms_by_instance_uuid'"
op|')'
newline|'\n'
nl|'\n'
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'get_admin_context'
op|'('
op|')'
newline|'\n'
name|'objects'
op|'.'
name|'block_device'
op|'.'
name|'BlockDeviceMappingList'
op|'.'
name|'bdms_by_instance_uuid'
op|'('
nl|'\n'
name|'ctxt'
op|','
op|'['
string|"'123'"
op|','
string|"'456'"
op|']'
op|')'
op|'.'
name|'AndReturn'
op|'('
op|'{'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'update'
op|'('
name|'ctxt'
op|','
nl|'\n'
name|'all_instances'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'base_filename'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'base_filename'
op|'+'
string|"'.info'"
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_run_image_cache_manager_pass
dedent|''
dedent|''
name|'def'
name|'test_run_image_cache_manager_pass'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'was'
op|'='
op|'{'
string|"'called'"
op|':'
name|'False'
op|'}'
newline|'\n'
nl|'\n'
DECL|function|fake_get_all_by_filters
name|'def'
name|'fake_get_all_by_filters'
op|'('
name|'context'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'was'
op|'['
string|"'called'"
op|']'
op|'='
name|'True'
newline|'\n'
name|'instances'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'x'
name|'in'
name|'range'
op|'('
number|'2'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instances'
op|'.'
name|'append'
op|'('
name|'fake_instance'
op|'.'
name|'fake_db_instance'
op|'('
nl|'\n'
name|'image_ref'
op|'='
string|"'1'"
op|','
nl|'\n'
name|'uuid'
op|'='
name|'x'
op|','
nl|'\n'
name|'name'
op|'='
name|'x'
op|','
nl|'\n'
name|'vm_state'
op|'='
string|"''"
op|','
nl|'\n'
name|'task_state'
op|'='
string|"''"
op|')'
op|')'
newline|'\n'
dedent|''
name|'return'
name|'instances'
newline|'\n'
nl|'\n'
dedent|''
name|'with'
name|'utils'
op|'.'
name|'tempdir'
op|'('
op|')'
name|'as'
name|'tmpdir'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'instances_path'
op|'='
name|'tmpdir'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.db.instance_get_all_by_filters'"
op|','
nl|'\n'
name|'fake_get_all_by_filters'
op|')'
newline|'\n'
name|'compute'
op|'='
name|'importutils'
op|'.'
name|'import_object'
op|'('
name|'CONF'
op|'.'
name|'compute_manager'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'use_local'
op|'='
name|'True'
op|','
name|'group'
op|'='
string|"'conductor'"
op|')'
newline|'\n'
name|'compute'
op|'.'
name|'conductor_api'
op|'='
name|'conductor'
op|'.'
name|'API'
op|'('
op|')'
newline|'\n'
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'get_admin_context'
op|'('
op|')'
newline|'\n'
name|'compute'
op|'.'
name|'_run_image_cache_manager_pass'
op|'('
name|'ctxt'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'was'
op|'['
string|"'called'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_store_swap_image
dedent|''
dedent|''
name|'def'
name|'test_store_swap_image'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_store_swap_image'
op|'('
string|"'swap_'"
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_store_swap_image'
op|'('
string|"'swap_123'"
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_store_swap_image'
op|'('
string|"'swap_456'"
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_store_swap_image'
op|'('
string|"'swap_abc'"
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_store_swap_image'
op|'('
string|"'123_swap'"
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_store_swap_image'
op|'('
string|"'swap_129_'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'len'
op|'('
name|'image_cache_manager'
op|'.'
name|'back_swap_images'
op|')'
op|','
number|'2'
op|')'
newline|'\n'
name|'expect_set'
op|'='
name|'set'
op|'('
op|'['
string|"'swap_123'"
op|','
string|"'swap_456'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image_cache_manager'
op|'.'
name|'back_swap_images'
op|','
name|'expect_set'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'lockutils'
op|','
string|"'external_lock'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'libvirt_utils'
op|','
string|"'update_mtime'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'os.path.exists'"
op|','
name|'return_value'
op|'='
name|'True'
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'os.path.getmtime'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'os.remove'"
op|')'
newline|'\n'
DECL|member|test_age_and_verify_swap_images
name|'def'
name|'test_age_and_verify_swap_images'
op|'('
name|'self'
op|','
name|'mock_remove'
op|','
name|'mock_getmtime'
op|','
nl|'\n'
name|'mock_exist'
op|','
name|'mock_mtime'
op|','
name|'mock_lock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'expected_remove'
op|'='
name|'set'
op|'('
op|')'
newline|'\n'
name|'expected_exist'
op|'='
name|'set'
op|'('
op|'['
string|"'swap_128'"
op|','
string|"'swap_256'"
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'image_cache_manager'
op|'.'
name|'back_swap_images'
op|'.'
name|'add'
op|'('
string|"'swap_128'"
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'back_swap_images'
op|'.'
name|'add'
op|'('
string|"'swap_256'"
op|')'
newline|'\n'
nl|'\n'
name|'image_cache_manager'
op|'.'
name|'used_swap_images'
op|'.'
name|'add'
op|'('
string|"'swap_128'"
op|')'
newline|'\n'
nl|'\n'
DECL|function|getmtime
name|'def'
name|'getmtime'
op|'('
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'time'
op|'.'
name|'time'
op|'('
op|')'
op|'-'
number|'1000000'
newline|'\n'
nl|'\n'
dedent|''
name|'mock_getmtime'
op|'.'
name|'side_effect'
op|'='
name|'getmtime'
newline|'\n'
nl|'\n'
DECL|function|removefile
name|'def'
name|'removefile'
op|'('
name|'path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'path'
op|'.'
name|'startswith'
op|'('
string|"'/tmp_age_test'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'os'
op|'.'
name|'remove'
op|'('
name|'path'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'fn'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'split'
op|'('
name|'path'
op|')'
op|'['
op|'-'
number|'1'
op|']'
newline|'\n'
name|'expected_remove'
op|'.'
name|'add'
op|'('
name|'fn'
op|')'
newline|'\n'
name|'expected_exist'
op|'.'
name|'remove'
op|'('
name|'fn'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'mock_remove'
op|'.'
name|'side_effect'
op|'='
name|'removefile'
newline|'\n'
nl|'\n'
name|'image_cache_manager'
op|'.'
name|'_age_and_verify_swap_images'
op|'('
name|'None'
op|','
string|"'/tmp_age_test'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'expected_exist'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'len'
op|'('
name|'expected_remove'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'swap_128'"
op|','
name|'expected_exist'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'swap_256'"
op|','
name|'expected_remove'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'utils'
op|','
string|"'synchronized'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|','
string|"'_get_age_of_file'"
op|','
nl|'\n'
name|'return_value'
op|'='
op|'('
name|'True'
op|','
number|'100'
op|')'
op|')'
newline|'\n'
DECL|member|test_lock_acquired_on_removing_old_enough_files
name|'def'
name|'test_lock_acquired_on_removing_old_enough_files'
op|'('
name|'self'
op|','
name|'mock_get_age'
op|','
nl|'\n'
name|'mock_synchronized'
op|')'
op|':'
newline|'\n'
indent|' '
name|'base_file'
op|'='
string|"'/tmp_age_test'"
newline|'\n'
name|'lock_path'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'CONF'
op|'.'
name|'instances_path'
op|','
string|"'locks'"
op|')'
newline|'\n'
name|'lock_file'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'split'
op|'('
name|'base_file'
op|')'
op|'['
op|'-'
number|'1'
op|']'
newline|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'image_cache_manager'
op|'.'
name|'_remove_old_enough_file'
op|'('
nl|'\n'
name|'base_file'
op|','
number|'60'
op|','
name|'remove_sig'
op|'='
name|'False'
op|','
name|'remove_lock'
op|'='
name|'False'
op|')'
newline|'\n'
name|'mock_synchronized'
op|'.'
name|'assert_called_once_with'
op|'('
name|'lock_file'
op|','
name|'external'
op|'='
name|'True'
op|','
nl|'\n'
name|'lock_path'
op|'='
name|'lock_path'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|VerifyChecksumTestCase
dedent|''
dedent|''
name|'class'
name|'VerifyChecksumTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|member|setUp
indent|' '
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'VerifyChecksumTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'img'
op|'='
op|'{'
string|"'container_format'"
op|':'
string|"'ami'"
op|','
string|"'id'"
op|':'
string|"'42'"
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'checksum_base_images'
op|'='
name|'True'
op|','
name|'group'
op|'='
string|"'libvirt'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|_make_checksum
dedent|''
name|'def'
name|'_make_checksum'
op|'('
name|'self'
op|','
name|'tmpdir'
op|')'
op|':'
newline|'\n'
indent|' '
name|'testdata'
op|'='
op|'('
string|"'OpenStack Software delivers a massively scalable cloud '"
nl|'\n'
string|"'operating system.'"
op|')'
newline|'\n'
nl|'\n'
name|'fname'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'tmpdir'
op|','
string|"'aaa'"
op|')'
newline|'\n'
name|'info_fname'
op|'='
name|'imagecache'
op|'.'
name|'get_info_filename'
op|'('
name|'fname'
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'open'
op|'('
name|'fname'
op|','
string|"'w'"
op|')'
name|'as'
name|'f'
op|':'
newline|'\n'
indent|' '
name|'f'
op|'.'
name|'write'
op|'('
name|'testdata'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'return'
name|'fname'
op|','
name|'info_fname'
op|','
name|'testdata'
newline|'\n'
nl|'\n'
DECL|member|_write_file
dedent|''
name|'def'
name|'_write_file'
op|'('
name|'self'
op|','
name|'info_fname'
op|','
name|'info_attr'
op|','
name|'testdata'
op|')'
op|':'
newline|'\n'
indent|' '
name|'f'
op|'='
name|'open'
op|'('
name|'info_fname'
op|','
string|"'w'"
op|')'
newline|'\n'
name|'if'
name|'info_attr'
op|'=='
string|'"csum valid"'
op|':'
newline|'\n'
indent|' '
name|'csum'
op|'='
name|'hashlib'
op|'.'
name|'sha1'
op|'('
op|')'
newline|'\n'
name|'csum'
op|'.'
name|'update'
op|'('
name|'testdata'
op|')'
newline|'\n'
name|'f'
op|'.'
name|'write'
op|'('
string|'\'{"sha1": "%s"}\\n\''
op|'%'
name|'csum'
op|'.'
name|'hexdigest'
op|'('
op|')'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'info_attr'
op|'=='
string|'"csum invalid, not json"'
op|':'
newline|'\n'
indent|' '
name|'f'
op|'.'
name|'write'
op|'('
string|"'banana'"
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'f'
op|'.'
name|'write'
op|'('
string|'\'{"sha1": "banana"}\''
op|')'
newline|'\n'
dedent|''
name|'f'
op|'.'
name|'close'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|_check_body
dedent|''
name|'def'
name|'_check_body'
op|'('
name|'self'
op|','
name|'tmpdir'
op|','
name|'info_attr'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'instances_path'
op|'='
name|'tmpdir'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'image_info_filename_pattern'
op|'='
op|'('
string|"'$instances_path/'"
nl|'\n'
string|"'%(image)s.info'"
op|')'
op|','
nl|'\n'
name|'group'
op|'='
string|"'libvirt'"
op|')'
newline|'\n'
name|'fname'
op|','
name|'info_fname'
op|','
name|'testdata'
op|'='
name|'self'
op|'.'
name|'_make_checksum'
op|'('
name|'tmpdir'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'_write_file'
op|'('
name|'info_fname'
op|','
name|'info_attr'
op|','
name|'testdata'
op|')'
newline|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'return'
name|'image_cache_manager'
op|','
name|'fname'
newline|'\n'
nl|'\n'
DECL|member|test_verify_checksum
dedent|''
name|'def'
name|'test_verify_checksum'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'utils'
op|'.'
name|'tempdir'
op|'('
op|')'
name|'as'
name|'tmpdir'
op|':'
newline|'\n'
indent|' '
name|'image_cache_manager'
op|','
name|'fname'
op|'='
name|'self'
op|'.'
name|'_check_body'
op|'('
name|'tmpdir'
op|','
string|'"csum valid"'
op|')'
newline|'\n'
name|'res'
op|'='
name|'image_cache_manager'
op|'.'
name|'_verify_checksum'
op|'('
name|'self'
op|'.'
name|'img'
op|','
name|'fname'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'res'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_verify_checksum_disabled
dedent|''
dedent|''
name|'def'
name|'test_verify_checksum_disabled'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'checksum_base_images'
op|'='
name|'False'
op|','
name|'group'
op|'='
string|"'libvirt'"
op|')'
newline|'\n'
name|'with'
name|'utils'
op|'.'
name|'tempdir'
op|'('
op|')'
name|'as'
name|'tmpdir'
op|':'
newline|'\n'
indent|' '
name|'image_cache_manager'
op|','
name|'fname'
op|'='
name|'self'
op|'.'
name|'_check_body'
op|'('
name|'tmpdir'
op|','
string|'"csum valid"'
op|')'
newline|'\n'
name|'res'
op|'='
name|'image_cache_manager'
op|'.'
name|'_verify_checksum'
op|'('
name|'self'
op|'.'
name|'img'
op|','
name|'fname'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'res'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_verify_checksum_invalid_json
dedent|''
dedent|''
name|'def'
name|'test_verify_checksum_invalid_json'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'intercept_log_messages'
op|'('
op|')'
name|'as'
name|'stream'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'utils'
op|'.'
name|'tempdir'
op|'('
op|')'
name|'as'
name|'tmpdir'
op|':'
newline|'\n'
indent|' '
name|'image_cache_manager'
op|','
name|'fname'
op|'='
op|'('
nl|'\n'
name|'self'
op|'.'
name|'_check_body'
op|'('
name|'tmpdir'
op|','
string|'"csum invalid, not json"'
op|')'
op|')'
newline|'\n'
name|'res'
op|'='
name|'image_cache_manager'
op|'.'
name|'_verify_checksum'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'img'
op|','
name|'fname'
op|','
name|'create_if_missing'
op|'='
name|'False'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'res'
op|')'
newline|'\n'
name|'log'
op|'='
name|'stream'
op|'.'
name|'getvalue'
op|'('
op|')'
newline|'\n'
nl|'\n'
comment|'# NOTE(mikal): this is a skip not a fail because the file is'
nl|'\n'
comment|'# present, but is not in valid JSON format and therefore is'
nl|'\n'
comment|'# skipped.'
nl|'\n'
name|'self'
op|'.'
name|'assertNotEqual'
op|'('
name|'log'
op|'.'
name|'find'
op|'('
string|"'image verification skipped'"
op|')'
op|','
op|'-'
number|'1'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_verify_checksum_invalid_repaired
dedent|''
dedent|''
dedent|''
name|'def'
name|'test_verify_checksum_invalid_repaired'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'utils'
op|'.'
name|'tempdir'
op|'('
op|')'
name|'as'
name|'tmpdir'
op|':'
newline|'\n'
indent|' '
name|'image_cache_manager'
op|','
name|'fname'
op|'='
op|'('
nl|'\n'
name|'self'
op|'.'
name|'_check_body'
op|'('
name|'tmpdir'
op|','
string|'"csum invalid, not json"'
op|')'
op|')'
newline|'\n'
name|'res'
op|'='
name|'image_cache_manager'
op|'.'
name|'_verify_checksum'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'img'
op|','
name|'fname'
op|','
name|'create_if_missing'
op|'='
name|'True'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'res'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_verify_checksum_invalid
dedent|''
dedent|''
name|'def'
name|'test_verify_checksum_invalid'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'intercept_log_messages'
op|'('
op|')'
name|'as'
name|'stream'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'utils'
op|'.'
name|'tempdir'
op|'('
op|')'
name|'as'
name|'tmpdir'
op|':'
newline|'\n'
indent|' '
name|'image_cache_manager'
op|','
name|'fname'
op|'='
op|'('
nl|'\n'
name|'self'
op|'.'
name|'_check_body'
op|'('
name|'tmpdir'
op|','
string|'"csum invalid, valid json"'
op|')'
op|')'
newline|'\n'
name|'res'
op|'='
name|'image_cache_manager'
op|'.'
name|'_verify_checksum'
op|'('
name|'self'
op|'.'
name|'img'
op|','
name|'fname'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'res'
op|')'
newline|'\n'
name|'log'
op|'='
name|'stream'
op|'.'
name|'getvalue'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotEqual'
op|'('
name|'log'
op|'.'
name|'find'
op|'('
string|"'image verification failed'"
op|')'
op|','
op|'-'
number|'1'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_verify_checksum_file_missing
dedent|''
dedent|''
dedent|''
name|'def'
name|'test_verify_checksum_file_missing'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'utils'
op|'.'
name|'tempdir'
op|'('
op|')'
name|'as'
name|'tmpdir'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'instances_path'
op|'='
name|'tmpdir'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'image_info_filename_pattern'
op|'='
op|'('
string|"'$instances_path/'"
nl|'\n'
string|"'%(image)s.info'"
op|')'
op|','
nl|'\n'
name|'group'
op|'='
string|"'libvirt'"
op|')'
newline|'\n'
name|'fname'
op|','
name|'info_fname'
op|','
name|'testdata'
op|'='
name|'self'
op|'.'
name|'_make_checksum'
op|'('
name|'tmpdir'
op|')'
newline|'\n'
nl|'\n'
name|'image_cache_manager'
op|'='
name|'imagecache'
op|'.'
name|'ImageCacheManager'
op|'('
op|')'
newline|'\n'
name|'res'
op|'='
name|'image_cache_manager'
op|'.'
name|'_verify_checksum'
op|'('
string|"'aaa'"
op|','
name|'fname'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'res'
op|')'
newline|'\n'
nl|'\n'
comment|'# Checksum requests for a file with no checksum now have the'
nl|'\n'
comment|'# side effect of creating the checksum'
nl|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'exists'
op|'('
name|'info_fname'
op|')'
op|')'
newline|'\n'
dedent|''
dedent|''
dedent|''
endmarker|''
end_unit
| 12.510379 | 88 | 0.603639 | 12,792 | 87,385 | 4.008521 | 0.038305 | 0.164869 | 0.101995 | 0.054527 | 0.915186 | 0.877001 | 0.838641 | 0.802992 | 0.766738 | 0.733448 | 0 | 0.019737 | 0.099548 | 87,385 | 6,984 | 89 | 12.512171 | 0.631929 | 0 | 0 | 0.96449 | 0 | 0 | 0.378841 | 0.051771 | 0 | 0 | 0 | 0 | 0.013459 | 0 | null | null | 0.00043 | 0.003436 | null | null | 0.002148 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
04144afa27849bce0388d56c69ac5b11ab573910 | 135 | py | Python | techfest/app/views.py | Horizonbcrec/techfest_starter | 3b8238f2c9d12b3c0d7e544bc24885a241357ed8 | ["MIT"] | null | null | null | techfest/app/views.py | Horizonbcrec/techfest_starter | 3b8238f2c9d12b3c0d7e544bc24885a241357ed8 | ["MIT"] | null | null | null | techfest/app/views.py | Horizonbcrec/techfest_starter | 3b8238f2c9d12b3c0d7e544bc24885a241357ed8 | ["MIT"] | null | null | null | from django.shortcuts import render_to_response
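# NOTE: render_to_response was deprecated in Django 2.0 and removed in Django 3.0;
# the modern equivalent is django.shortcuts.render(request, 'index.html').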
# Create your views here.
def home(request):
    return render_to_response('index.html')
| 22.5 | 47 | 0.807407 | 20 | 135 | 5.25 | 0.85 | 0.152381 | 0.304762 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 135 | 5 | 48 | 27 | 0.875 | 0.17037 | 0 | 0 | 0 | 0 | 0.090909 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
042ba52ba5e331739f90d035fff0d7ed16a0b0f0 | 65,861 | py | Python | post_optimization_studies/mad_analyses/ma100MeV_L2TeV_deta2_1/Output/Histos/MadAnalysis5job_0/selection_11.py | sheride/axion_pheno | 7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5 | ["MIT"] | null | null | null | post_optimization_studies/mad_analyses/ma100MeV_L2TeV_deta2_1/Output/Histos/MadAnalysis5job_0/selection_11.py | sheride/axion_pheno | 7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5 | ["MIT"] | null | null | null | post_optimization_studies/mad_analyses/ma100MeV_L2TeV_deta2_1/Output/Histos/MadAnalysis5job_0/selection_11.py | sheride/axion_pheno | 7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5 | ["MIT"] | null | null | null | def selection_11():
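    # NOTE: this file appears to be auto-generated by MadAnalysis5 (see the
    # MadAnalysis5job_0 path); it assembles the binning and per-sample weights
    # for the y12 PT (transverse momentum) histogram defined below.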
# Library import
import numpy
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
# Library version
matplotlib_version = matplotlib.__version__
numpy_version = numpy.__version__
# Histo binning
xBinning = numpy.linspace(0.0,2000.0,401,endpoint=True)
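    # (401 evenly spaced edges, i.e. 400 uniform bins of width 5 covering [0, 2000])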
# Creating data sequence: middle of each bin
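    # (equivalently: xData = 0.5*(xBinning[:-1] + xBinning[1:]))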
xData = numpy.array([2.5,7.5,12.5,17.5,22.5,27.5,32.5,37.5,42.5,47.5,52.5,57.5,62.5,67.5,72.5,77.5,82.5,87.5,92.5,97.5,102.5,107.5,112.5,117.5,122.5,127.5,132.5,137.5,142.5,147.5,152.5,157.5,162.5,167.5,172.5,177.5,182.5,187.5,192.5,197.5,202.5,207.5,212.5,217.5,222.5,227.5,232.5,237.5,242.5,247.5,252.5,257.5,262.5,267.5,272.5,277.5,282.5,287.5,292.5,297.5,302.5,307.5,312.5,317.5,322.5,327.5,332.5,337.5,342.5,347.5,352.5,357.5,362.5,367.5,372.5,377.5,382.5,387.5,392.5,397.5,402.5,407.5,412.5,417.5,422.5,427.5,432.5,437.5,442.5,447.5,452.5,457.5,462.5,467.5,472.5,477.5,482.5,487.5,492.5,497.5,502.5,507.5,512.5,517.5,522.5,527.5,532.5,537.5,542.5,547.5,552.5,557.5,562.5,567.5,572.5,577.5,582.5,587.5,592.5,597.5,602.5,607.5,612.5,617.5,622.5,627.5,632.5,637.5,642.5,647.5,652.5,657.5,662.5,667.5,672.5,677.5,682.5,687.5,692.5,697.5,702.5,707.5,712.5,717.5,722.5,727.5,732.5,737.5,742.5,747.5,752.5,757.5,762.5,767.5,772.5,777.5,782.5,787.5,792.5,797.5,802.5,807.5,812.5,817.5,822.5,827.5,832.5,837.5,842.5,847.5,852.5,857.5,862.5,867.5,872.5,877.5,882.5,887.5,892.5,897.5,902.5,907.5,912.5,917.5,922.5,927.5,932.5,937.5,942.5,947.5,952.5,957.5,962.5,967.5,972.5,977.5,982.5,987.5,992.5,997.5,1002.5,1007.5,1012.5,1017.5,1022.5,1027.5,1032.5,1037.5,1042.5,1047.5,1052.5,1057.5,1062.5,1067.5,1072.5,1077.5,1082.5,1087.5,1092.5,1097.5,1102.5,1107.5,1112.5,1117.5,1122.5,1127.5,1132.5,1137.5,1142.5,1147.5,1152.5,1157.5,1162.5,1167.5,1172.5,1177.5,1182.5,1187.5,1192.5,1197.5,1202.5,1207.5,1212.5,1217.5,1222.5,1227.5,1232.5,1237.5,1242.5,1247.5,1252.5,1257.5,1262.5,1267.5,1272.5,1277.5,1282.5,1287.5,1292.5,1297.5,1302.5,1307.5,1312.5,1317.5,1322.5,1327.5,1332.5,1337.5,1342.5,1347.5,1352.5,1357.5,1362.5,1367.5,1372.5,1377.5,1382.5,1387.5,1392.5,1397.5,1402.5,1407.5,1412.5,1417.5,1422.5,1427.5,1432.5,1437.5,1442.5,1447.5,1452.5,1457.5,1462.5,1467.5,1472.5,1477.5,1482.5,1487.5,1492.5,1497.5,1502.5,1507.5,1512.5,1517.5,1522.5,1527.5,1532.5,1537.5,1542.5,1547.5,1552.5,1557.5,1562.5,1567.5,1572.5,1577.5,1582.5,1587.5,1592.5,1597.5,1602.5,1607.5,1612.5,1617.5,1622.5,1627.5,1632.5,1637.5,1642.5,1647.5,1652.5,1657.5,1662.5,1667.5,1672.5,1677.5,1682.5,1687.5,1692.5,1697.5,1702.5,1707.5,1712.5,1717.5,1722.5,1727.5,1732.5,1737.5,1742.5,1747.5,1752.5,1757.5,1762.5,1767.5,1772.5,1777.5,1782.5,1787.5,1792.5,1797.5,1802.5,1807.5,1812.5,1817.5,1822.5,1827.5,1832.5,1837.5,1842.5,1847.5,1852.5,1857.5,1862.5,1867.5,1872.5,1877.5,1882.5,1887.5,1892.5,1897.5,1902.5,1907.5,1912.5,1917.5,1922.5,1927.5,1932.5,1937.5,1942.5,1947.5,1952.5,1957.5,1962.5,1967.5,1972.5,1977.5,1982.5,1987.5,1992.5,1997.5])
# Creating weights for histo: y12_PT_0
    y12_PT_0_weights = numpy.array([0.00213916317681,0.0117553101814,0.0138944745572,0.0267096167645,0.0331315982839,0.0406071568574,0.0352547515307,0.0512950850822,0.0577218787332,0.0780118081341,0.0598349400927,0.0844241334159,0.0769355533101,0.126108664198,0.120746534688,0.101518711485,0.118620923417,0.154940886649,0.15600762912,0.172047175304,0.163514434575,0.169918126796,0.159219766957,0.212661566346,0.208370415902,0.22761258757,0.25112045015,0.239358785077,0.271431322715,0.278926937166,0.275718436405,0.304548580527,0.346198539101,0.31309702863,0.334473164854,0.313081880807,0.295998813485,0.316284586227,0.410328546489,0.380406080459,0.384702626566,0.370835653901,0.430692576322,0.40284736044,0.42952751289,0.431694171164,0.413574736953,0.445581007861,0.428505534434,0.458405098874,0.434894798255,0.44451746282,0.457346749657,0.480907809472,0.45733955544,0.478707178508,0.527894837701,0.522543939163,0.528939597842,0.508626726884,0.487266697711,0.466985801049,0.460582948152,0.508651107285,0.501154733444,0.473388653945,0.491570837714,0.45843427542,0.470133270872,0.457354343552,0.464821940526,0.444488685953,0.450917118287,0.444536647398,0.404992036385,0.414592718621,0.430630226444,0.4124776189,0.407089550242,0.388933705301,0.371853076019,0.400741453329,0.379354725619,0.368648931756,0.387851735072,0.371870182268,0.408210649017,0.368635422615,0.348364597857,0.385760376266,0.359037498162,0.369719151431,0.323760056871,0.349434178046,0.310946517375,0.324835991952,0.323768609995,0.350505396918,0.3002768145,0.285258847092,0.279969898754,0.27890415548,0.313110937449,0.310963623624,0.299188929025,0.288518786503,0.239362182346,0.272502341748,0.27782406374,0.306693096601,0.235071911195,0.277829299531,0.245792413234,0.242579915686,0.251106541331,0.240440435564,0.227611708277,0.214778264782,0.220105981955,0.236148845472,0.215845886546,0.216933532213,0.228698514619,0.205164313212,0.212655771005,0.186979571501,0.221181517357,0.219092116977,0.198769613762,0.210490511606,0.184883016872,0.236157958147,0.1763001964,0.186978892047,0.17097355836,0.165613707019,0.161359247079,0.182710563256,0.161364842581,0.167772691462,0.164569106748,0.168848746446,0.167770053582,0.161359366983,0.170998218536,0.17416123586,0.139997539256,0.157084083783,0.14747113141,0.168837875185,0.16348242031,0.141046975639,0.144244085557,0.139981112461,0.148553861029,0.151730387494,0.153879300034,0.161359926533,0.129289786968,0.126106026318,0.135729330369,0.130360806,0.132491852902,0.0983012978883,0.115401591363,0.094056749981,0.119690423671,0.120748053467,0.095102669191,0.100442456661,0.116455024533,0.0801393778306,0.103668743125,0.111132343311,0.100455166444,0.126092557146,0.11647760638,0.0843998329505,0.0929639084907,0.10792140451,0.10790825508,0.0758664127672,0.0951052671026,0.0801382587302,0.0790694379302,0.0940361265597,0.102584135016,0.0822789778562,0.0641113024251,0.0844024308621,0.0737327279864,0.0747952738307,0.0641103032284,0.0812153928473,0.0705255061969,0.0769391903864,0.0534361239512,0.0716018009887,0.0737252539946,0.0790739542996,0.0748154576054,0.0673265177888,0.0555749246194,0.0662588160892,0.0545053444298,0.0758622561086,0.0694522089954,0.0662498233183,0.0619722619497,0.0619752195721,0.0641043080477,0.0694566853969,0.0608999639449,0.057693821288,0.0480818281442,0.0683893034403,0.0587649602244,0.0459409091788,0.0662408305473,0.0566366311384,0.0651761664059,0.0512921274597,0.0416699425089,0.0534316075818,0.0545027065504,0.0555580581779,0.0395318772495,0.0352651711546,0.0545053444298,0.0373912860207,0.0288566668012,0.0545098607992,0.0352521216448,0.0352527891083,0.0320624497474,0.0502127951097,0.0405984838295,0.0480751535098,0.0448738469651,0.0341896637299,0.0363321214583,0.0406228242627,0.0373961101427,0.03526933181,0.0384667894487,0.0491712723655,0.0395312137829,0.040602640488,0.0320526975869,0.0213714599837,0.0288507435628,0.0235042654713,0.0267201323113,0.0299262469908,0.0256490213523,0.0288495445267,0.019246108504,0.0213759523724,0.0299132014777,0.0277788652207,0.0256389374585,0.0235016355854,0.0331312665506,0.0320466744288,0.0320665104831,0.0299225219852,0.0235106243596,0.0363298233057,0.0309846481668,0.0288510752961,0.025640468228,0.0213714599837,0.0235095252431,0.0256426624641,0.0245831661683,0.0277881817314,0.0181683069235,0.018167207807,0.0192333947243,0.0170938986152,0.0192289023357,0.023503933738,0.0203067039162,0.0213688300978,0.0299307433762,0.0181645779211,0.0170923678457,0.0160272800448,0.0235151167482,0.0149629596271,0.0235076627403,0.0256561436269,0.0128193028626,0.0106838674891,0.0170968602344,0.0170949977316,0.0288544685683,0.018167207807,0.0213762881025,0.011757176681,0.0149587989718,0.0160288108143,0.0149532074666,0.0181638145348,0.0170976276175,0.0149648221299,0.0160238867726,0.0170923678457,0.0160205894233,0.019233062991,0.0106894589942,0.0138982035596,0.0192367919933,0.0117534476786,0.0106894589942,0.0192356928769,0.0106875924946,0.00855029461836,0.0149587989718,0.010686497375,0.00961801230511,0.00747961531238,0.0128185354795,0.010683100106,0.00962064219101,0.00961428330277,0.0128237952513,0.00748257693161,0.00641189762563,0.00427087074701,0.00748334431473,0.00534417993888,0.0117508177927,0.00855215712114,0.00641299674208,0.016024650159,0.0106902223805,0.00961877968823,0.0138870205493,0.00747698542649,0.0128159055936,0.00854843211558,0.00854843211558,0.00855106200148,0.00427646225213,0.00320501676207,0.00320501676207,0.0117579400673,0.0117523485622,0.0021346695891,0.00427569486901,0.00213729907532,0.00854656561602,0.00855106200148,0.00854317234379,0.00534155005298,0.0021354353735,0.00854317234379,0.00534604244166,0.0106857299919,0.00854547049636,0.00641189762563,0.00641189762563,0.00427383236623,0.0053423174361,0.00427383236623,0.0106827683726,0.00534604244166,0.00213916317681,0.010686497375,0.00320501676207,0.00427646225213,0.00855292450426,0.00320874456538,0.00213729907532,0.00534155005298,0.00426933997756,0.00106771768675,0.00427273324979,0.0021346695891,0.00534155005298,0.00320501676207,0.00106958138857,0.00641189762563,0.00854547049636,0.00641003512285,0.00320688046388,0.00320501676207,0.0074852068175,0.00640740523696,0.00320688046388,0.00427383236623,0.00640816862329,0.00534417993888,0.00427459974935,0.0,0.00320501676207,0.00213729907532,0.00106771768675,0.0021354353735])
# Creating weights for histo: y12_PT_1
y12_PT_1_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_2
y12_PT_2_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.05462838872,0.0,0.0,0.0,1.0521138287,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0529581672,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_3
y12_PT_3_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.691821959624,0.230541945396,0.0,0.229694011257,0.921958055545,0.0,0.461330812657,0.460911975383,0.0,0.0,0.461015724066,0.0,0.461055302267,0.689861878035,0.229922719464,1.3821349523,0.229694011257,0.460149998949,0.690326441579,0.461198244896,0.690241905616,0.0,0.0,0.0,0.460772491044,0.0,0.0,0.690858633895,1.15158309314,1.15090949521,0.230119227154,0.230010867419,0.230176942161,0.691994489841,0.230418945569,0.922139807867,0.230541945396,0.691168727179,0.460080064652,0.92083103745,0.230610342676,0.230754707046,0.230635895592,0.461006117706,0.460752125561,0.691074200602,0.460187655878,0.23082921397,0.921374757397,0.229973210489,0.690960077051,0.230488995143,0.0,0.92123988411,0.0,0.0,0.0,0.0,0.0,0.230350855693,0.0,0.230578411137,0.229973210489,0.0,0.690543161049,0.229943161797,0.0,0.0,0.0,0.230663830885,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.230635895592,0.0,0.230742910437,0.0,0.0,0.0,0.0,0.230119227154,0.0,0.0,0.0,0.0,0.230663830885,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_4
y12_PT_4_weights = numpy.array([0.0,0.0,0.498469064704,1.24643348766,0.886133671842,0.775310748726,0.719814845745,1.05281757725,0.886172141738,0.968969360713,0.664421657388,0.775008375341,0.775587731979,0.609230435986,0.66458053806,0.913991262537,0.498321340302,0.664376647609,0.636801425953,0.719968340632,0.415025936171,0.747868632911,0.498576395715,0.719707514735,0.77541692564,0.71984292877,0.692388502588,0.88626793178,0.470858066045,0.774974137133,0.72028417848,0.719861779019,0.498341344648,0.720138377573,0.609271214076,0.581596355389,0.442988164983,0.49862255959,0.332114384664,0.49826286606,0.387640371103,0.442610005903,0.249283004421,0.360152091413,0.387327610847,0.249286812941,0.470861913034,0.387989677762,0.360077421344,0.248958780135,0.44322283135,0.249399529737,0.166112588983,0.166160099305,0.249095425207,0.276973328007,0.193801066013,0.24917586576,0.22156155869,0.193785485705,0.110764910513,0.110914481469,0.0553285974015,0.193819723913,0.138432036751,0.110657694912,0.138271925042,0.0553836478231,0.138491934379,0.0830636230066,0.249273771646,0.0831985369328,0.0553638358265,0.0554147314993,0.0,0.0276913699668,0.110632535599,0.0553794161345,0.0552963980983,0.0,0.0277222382115,0.0830750100959,0.0277323981112,0.0,0.0277390149333,0.0,0.0277196030236,0.0276848031555,0.0553702987691,0.0830321931013,0.0277280856358,0.0553993820107,0.0,0.0276887617078,0.0,0.0,0.0,0.0277278586634,0.0,0.0277648859386,0.0,0.027694628367,0.0277648859386,0.0,0.0,0.0277236231278,0.0,0.0,0.055403690639,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0276398356937,0.0,0.0,0.0,0.0276887617078,0.0,0.0276638986138,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_5
y12_PT_5_weights = numpy.array([0.0,0.0,0.342646784432,0.493961244953,0.493901713615,0.473931407503,0.554372869781,0.625159756946,0.494087104275,0.484125951875,0.524324229808,0.554510319811,0.50395983959,0.342879266139,0.312492614391,0.393308013574,0.403285854167,0.423461212664,0.302490500062,0.403397816774,0.231910485813,0.473809007189,0.393110061256,0.362852303047,0.483887219681,0.423494892473,0.383166932105,0.383264391156,0.453511976589,0.252015571845,0.252060478257,0.312434478794,0.373052066272,0.191586591296,0.181504373638,0.211601621767,0.231906541331,0.332676043907,0.292428186868,0.151355604504,0.141149651476,0.120981514415,0.121034127738,0.120932724205,0.171451587885,0.221658169949,0.120930236147,0.12105136209,0.191571116789,0.120956269729,0.181496241936,0.161258317884,0.161379322458,0.0907964560452,0.18158423423,0.100737400559,0.14112998975,0.141228905224,0.100803667859,0.100796203685,0.0605090292621,0.13098259711,0.151100608906,0.0705610200505,0.0403456498531,0.110945598908,0.030299478693,0.100867932575,0.0503742346915,0.0705540413514,0.0302883188428,0.050404722504,0.030203676325,0.0403087295005,0.0402721125696,0.0302492806067,0.0605331694927,0.0302838828175,0.0504209252229,0.0302209834989,0.0201737382259,0.0100726598747,0.0100946458111,0.0201620140114,0.0100968061737,0.0201634097512,0.0201620140114,0.0201673056858,0.0,0.0302631287732,0.0,0.0,0.0100822601373,0.0,0.0,0.0100846025528,0.010094196747,0.0,0.0100798570374,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100789892514,0.0100789892514,0.0,0.0100914780886,0.0,0.0,0.0,0.0100798570374,0.0100989847415,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100705298543,0.0100422509518,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100664397298,0.0,0.0,0.0,0.0,0.0,0.0,0.0100968061737,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100700261743,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100846025528,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_6
y12_PT_6_weights = numpy.array([0.0,0.0,0.198059152786,0.237684511085,0.384786166575,0.390430036109,0.370536559863,0.342254151982,0.31962342402,0.297122394635,0.325374753692,0.288520120191,0.274375126482,0.282884407316,0.305559804529,0.350855310657,0.280047813647,0.302796005196,0.271610442228,0.20090371074,0.200910020608,0.285792525652,0.206572896698,0.231997318918,0.22632309276,0.198081929872,0.164068353117,0.198068502164,0.164125642106,0.209317420152,0.181122196184,0.161336988047,0.158449953905,0.130122492024,0.121652917191,0.107554708848,0.147126086992,0.110358868254,0.104644628296,0.101830234591,0.101862245631,0.0905378400709,0.0990360786354,0.0990421191802,0.104633393653,0.0537551929347,0.0848957788525,0.0678759090405,0.0820483352878,0.107509462474,0.0622403115898,0.0707293162004,0.0792178206385,0.0735592922024,0.062211070736,0.0791756907241,0.0678999557953,0.0537400338605,0.0707275848341,0.0622323088298,0.0339359076605,0.0565940950921,0.0480761574262,0.0254465683191,0.036802380865,0.0283015298612,0.039612661613,0.0367758217053,0.0311118952537,0.031109425171,0.0254830886065,0.0339296824366,0.0282830773434,0.0282842854524,0.0339482234464,0.0141433662251,0.036806286058,0.0169712376551,0.0169832341001,0.0226212131415,0.0339264428578,0.0226209861401,0.016959360482,0.022633198044,0.0169666399156,0.016967651803,0.00283044000818,0.0198122828592,0.0169486991128,0.0113251250394,0.00566032520963,0.0169644353091,0.0197936995272,0.00283357685925,0.00847923585695,0.00566119858777,0.00282145383211,0.00283100443361,0.0113126976765,0.00848091720605,0.00849119767465,0.00283253034448,0.00848779265418,0.00282803148522,0.0141535158793,0.00283100443361,0.00565725876746,0.00564996394393,0.0,0.0,0.0,0.00566301075121,0.00566930138226,0.00283625432111,0.00283204132967,0.0028288163713,0.00565027174239,0.0,0.0,0.0,0.00565299575878,0.0028254663698,0.0,0.00283014759964,0.0,0.0,0.00283204132967,0.00565394608653,0.00282588651469,0.00282711001358,0.0,0.00282750322611,0.00283014759964,0.0,0.0028254663698,0.0,0.0,0.00566144482654,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0028235056936,0.0,0.0,0.0,0.00283625432111,0.0,0.0,0.0,0.0,0.0,0.00283625432111,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00283304552215,0.0,0.0,0.0,0.0,0.0,0.0,0.00282750322611,0.0,0.00283500542886,0.00282750322611,0.0,0.00282750322611,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0028293357812,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_7
y12_PT_7_weights = numpy.array([0.0,0.0,0.0441605064195,0.0486691136944,0.0670291824829,0.0609381877536,0.0563315069122,0.0730837836477,0.0594304189223,0.0761821875658,0.0869060594132,0.0609380932248,0.0487130104775,0.0608869768094,0.0549511746369,0.0563430748666,0.0411318528945,0.0410957074681,0.0472459480843,0.0411626101829,0.0365502339851,0.0319448292817,0.0380212193208,0.0258271538159,0.0258655088516,0.027417564397,0.0274245949719,0.0289473466083,0.0244029011253,0.025921989773,0.0182474850606,0.0319729161332,0.0259115679796,0.0182712354062,0.0136916843129,0.0197970001461,0.0228055899463,0.0182914527402,0.0106593890682,0.0198524530675,0.013722158015,0.00912413718778,0.0121834310218,0.0213220441043,0.0121960978728,0.0137624863382,0.0136673195305,0.0137152810493,0.0167896037712,0.00764720481271,0.0106620973166,0.0121860896426,0.00761321227826,0.0106604501533,0.0091139162679,0.016720219677,0.00609795558929,0.0106704583835,0.00457081072981,0.0060883089315,0.00304258351627,0.0106806816666,0.00914745742762,0.0,0.0045882382844,0.0106740670181,0.00608150995196,0.00456471480777,0.00762184747854,0.00457365958967,0.00763234489498,0.0030364190609,0.0030442507669,0.00610038970431,0.00608978594304,0.00458129514854,0.00457030263784,0.0,0.00458426453255,0.00152156175585,0.00457937739676,0.00152126280871,0.00302969452274,0.00152268901106,0.0,0.00153296664818,0.00152268901106,0.00457031563554,0.00456085685362,0.00152126280871,0.00152223999955,0.0045743543759,0.00455050831994,0.00305987991224,0.00153296664818,0.00151845294195,0.0,0.0,0.0,0.0015081327669,0.0,0.00152772030318,0.0,0.0,0.00304825996703,0.00152126280871,0.0,0.0,0.0,0.0,0.0,0.00304831668428,0.00152459258353,0.00152126280871,0.0,0.00151228966818,0.0,0.00152607668473,0.0015359253977,0.0,0.0,0.0,0.00152730792155,0.0,0.00306562253312,0.00152156175585,0.00152412939271,0.0,0.0,0.0,0.0,0.00303686925401,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00152772030318,0.0,0.0,0.0,0.00153296664818,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_8
y12_PT_8_weights = numpy.array([0.0,0.0,0.00812578068365,0.0135411448713,0.0164335570164,0.0173310263344,0.0187784532598,0.0204039629432,0.0157083126736,0.0175152711256,0.0137262446445,0.0149904512167,0.0135417572233,0.0142615250592,0.0113764303284,0.0122749279358,0.01101307449,0.0101112262771,0.0088487027102,0.0117373598663,0.0079457029674,0.00920819187287,0.00668500105155,0.00866688421958,0.00957167865454,0.00595831633351,0.00487243514937,0.00704417826128,0.00487494617788,0.00776507451944,0.00740097923699,0.00686295286636,0.00523448540707,0.00451340043665,0.0070411242036,0.00559904669327,0.00740078667345,0.00505473504883,0.00379321935947,0.00451346205698,0.00487500009567,0.00343018664262,0.00270914172542,0.00379124827912,0.00415136173277,0.00379269250564,0.00270563552855,0.00433287982427,0.00451445183356,0.00289036480958,0.00325339637105,0.00342995248536,0.00288624780118,0.00252830993036,0.00307125129268,0.00379215294261,0.00108306744575,0.00216646918179,0.0037912166987,0.00144397233553,0.000722135211941,0.00108364051483,0.00108342330316,0.0025301442906,0.00216737538579,0.00162560018824,0.00162473673335,0.00162416520477,0.00252890071529,0.00162601882137,0.00108392589399,0.00162481915054,0.00126272344785,0.000903602466667,0.001263657381,0.000902143220192,0.00144274686119,0.00108241080409,0.000541015341834,0.000902389316391,0.000541095833392,0.000180724113528,0.000722138292958,0.000902234110181,0.000360539288654,0.000541173243934,0.000180070360323,0.00036118653321,0.000180801331506,0.00126507965528,0.000903054815971,0.000540582459006,0.000540511210497,0.000361171051102,0.000540832021348,0.000180679746889,0.000541462089238,0.000361118288693,0.000180553964388,0.000541767495006,0.000360929114276,0.000540630985017,0.000360361051845,0.000180693110799,0.0,0.000360823396894,0.000541186338254,0.000542237350033,0.00018046892833,0.000180619975168,0.000360985342828,0.00054201821273,0.000361487009352,0.0,0.0,0.0,0.0,0.000360866800715,0.000179952357389,0.000180693110799,0.0,0.0,0.00018046892833,0.0,0.0,0.0,0.000180065430697,0.000181037337375,0.000181037337375,0.000181037337375,0.0,0.0,0.0,0.0,0.000180070360323,0.0,0.000361171051102,0.000181235600792,0.0,0.000360863719699,0.0,0.0,0.000180801331506,0.0,0.000181037337375,0.0,0.000180833990281,0.0,0.0,0.0,0.0,0.000361359493778,0.0,0.0,0.0,0.000180916715577,0.0,0.000180707745627,0.0,0.000180916715577,0.0,0.0,0.0,0.0,0.000179952357389,0.0,0.0,0.0,0.0,0.0,0.000180600757327,0.000180374764761,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180693110799,0.0,0.0,0.000180833990281,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180374764761,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180916715577,0.000180619975168,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_9
y12_PT_9_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0243458291178,0.0,0.0,0.0121240822392,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0121313846429,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_10
y12_PT_10_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100262834878,0.0100262834878,0.0200525091271,0.0100271594796,0.0100407290877,0.0,0.0,0.0100329195386,0.0100584803169,0.0100299568683,0.010069669872,0.0301274018933,0.0200763468462,0.0100602901489,0.050256308245,0.0602515807981,0.0200523355815,0.0301294679116,0.0300697517188,0.0200694174208,0.0401916050809,0.0501888320874,0.0100187053327,0.0301061177729,0.0602174914963,0.0301159065675,0.0100532491586,0.0401922000942,0.0301372030841,0.0200696198906,0.0301141297918,0.0602747202031,0.0100299568683,0.0301103779026,0.010035564042,0.0100153625151,0.0100262834878,0.0100609843311,0.0100153625151,0.0200594344204,0.0100187053327,0.020073950265,0.03012092286,0.0300812966291,0.0100702896775,0.010045732984,0.010069669872,0.0100609843311,0.0,0.0,0.010036700352,0.0,0.0100532491586,0.0,0.0,0.0,0.0,0.0,0.0100324443544,0.0,0.0,0.0,0.0,0.0200624012227,0.010045732984,0.0,0.0,0.0,0.0,0.010034093037,0.0100568564265,0.0100262834878,0.0,0.0,0.0,0.0,0.010054654051,0.0,0.0,0.0,0.0,0.0100568564265,0.0,0.0100568564265,0.0,0.0,0.0,0.0,0.0,0.0100324443544,0.0,0.0,0.010069669872,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_11
y12_PT_11_weights = numpy.array([0.0,0.0,0.00549807765066,0.0384458680927,0.0604854925616,0.043981907341,0.0384939335531,0.0329657758865,0.0605161251009,0.0440202183285,0.0825228419348,0.0329611444415,0.0824654363938,0.0440247278932,0.115528874828,0.055037003873,0.0990400371032,0.0990143610226,0.115529606109,0.0715340075666,0.0990226894981,0.121036272246,0.115432142632,0.131959006957,0.120907891843,0.0770056128531,0.109987391601,0.0714536073078,0.082487943591,0.126524168843,0.109991779285,0.154095160489,0.131984601784,0.214593897358,0.13201811882,0.0935006664646,0.121051182249,0.148391901703,0.12651149331,0.181532205872,0.115532937499,0.115531353057,0.115485891769,0.0935011539852,0.0989567117213,0.0715314074572,0.120996945591,0.104563928965,0.104473168895,0.0494896297726,0.0550004804609,0.115592699389,0.0935586407796,0.109970003369,0.0659756642545,0.0770270231292,0.0604668449018,0.0880393803624,0.0494766698522,0.0660910034839,0.0495250156369,0.0990707508959,0.0605111686423,0.0769325254021,0.0550102308712,0.0715177975094,0.0439828823821,0.0329953480686,0.0440263123349,0.0219819061022,0.0880275579899,0.0220200342694,0.038480043281,0.0440327719818,0.0220025688469,0.0274906726403,0.0385451394583,0.0165371388203,0.0329902087898,0.0330241239672,0.0220070418476,0.0219984492985,0.0220039989071,0.0220231137741,0.0219912949349,0.0330194275195,0.0164920512977,0.00550505731941,0.0109966671979,0.00549249960339,0.0220139077616,0.0110050931775,0.00549807765066,0.010978921451,0.0110062185374,0.00547933654939,0.0,0.0110014042722,0.0165104876986,0.00549249960339,0.00549992210329,0.0109774507641,0.00549914613313,0.0,0.00551434458529,0.00547500574212,0.00549610725523,0.0,0.0109883265343,0.00548818910947,0.0,0.0,0.0,0.0,0.00550985533385,0.0,0.0,0.0,0.0,0.00550427322391,0.0,0.0109999417107,0.00549405154371,0.0,0.00549610725523,0.0,0.0,0.00548486990728,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00548818910947,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00549455531491,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00548486990728,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00549610725523,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00551434458529,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_12
y12_PT_12_weights = numpy.array([0.0,0.0,0.0404555739247,0.0986830438119,0.110524815069,0.128303966011,0.138153543725,0.145080246196,0.156924622832,0.165813937765,0.136195380449,0.14410018253,0.152946488657,0.132233680207,0.148015066103,0.165759224792,0.144087195714,0.137186947868,0.116440870724,0.140116156188,0.129280542477,0.165810169986,0.120395837061,0.114469239639,0.158926356073,0.129264068461,0.135175715012,0.153925951082,0.143109857676,0.141115579829,0.120384052729,0.121346960972,0.113487773076,0.102626225816,0.115461448382,0.103621761429,0.106590210776,0.0986786747906,0.11151670315,0.117435845178,0.0957135923964,0.0927552439067,0.0858691853592,0.0996979393998,0.100651708772,0.0848227045532,0.0789584759119,0.0572336979159,0.075996399725,0.0838878542453,0.0661180826693,0.0572389086753,0.0601896414399,0.0542925850147,0.0552787813435,0.047355942019,0.0562578028575,0.0572376260268,0.043417409615,0.0444151497799,0.0473616738542,0.0355456237071,0.0414578835247,0.0325667448256,0.0355149964671,0.0256486040337,0.0345424523277,0.0305906324875,0.0335513218107,0.021711554692,0.0207260638199,0.0266419471194,0.0226939070837,0.0187462197765,0.0207196145031,0.0226933298919,0.0197403925753,0.0177595985704,0.0138125084963,0.00789089736982,0.0138283492047,0.0138141278399,0.0148071743131,0.0138198516587,0.00690864117674,0.00592436080408,0.00986252043833,0.00987656543888,0.00493546289698,0.00296282292875,0.00987223249208,0.00591665689681,0.00789246861417,0.0029626453621,0.0059245091103,0.00591837644739,0.00691054510804,0.00986440432824,0.00493503401141,0.00493549496319,0.0029575656734,0.00394800115533,0.00591888149022,0.00394192821573,0.0019725742325,0.00394929182033,0.0029627018788,0.00395036764172,0.0,0.00296461062003,0.00591843256326,0.00395016081466,0.00394993595035,0.00197706029546,0.00295864510224,0.000986134601334,0.00197238985179,0.00098390559882,0.00296242450607,0.0,0.0,0.00197380196757,0.0,0.0,0.00098390559882,0.0019720768054,0.0,0.0,0.0,0.0019722595828,0.00098765614306,0.0,0.0,0.0,0.00197436994034,0.00197611113561,0.000985632364299,0.0,0.00197057971416,0.00197633800405,0.000987941532341,0.0,0.0,0.000984216240242,0.00295975940308,0.000985632364299,0.000987858961847,0.0,0.0,0.0,0.000988435351995,0.0,0.0,0.0,0.0019755323405,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000986134601334,0.0,0.00197462166009,0.0,0.0,0.00098390559882,0.0,0.00196933113606,0.000987941532341,0.0,0.0,0.000988169603269,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00197238584351,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000988169603269,0.0,0.0,0.0,0.0,0.0,0.000985425537236,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000988891493851,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000988891493851,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00098679115701,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000986287316665,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_13
y12_PT_13_weights = numpy.array([0.0,0.0,0.0219339510763,0.0433564801511,0.0519319670777,0.0657933590032,0.0660389033401,0.0678158089202,0.0756267755292,0.0715837161774,0.0862209513873,0.0766301984434,0.0746045876982,0.0801696858506,0.0846917507101,0.0809126405176,0.0768815443004,0.0836894480894,0.0796439480982,0.0655393724543,0.074604987803,0.0794060457636,0.0781469958707,0.0748620151486,0.0781373933546,0.0723486766103,0.0796589920399,0.072348236495,0.0693185626678,0.0685624445517,0.0690727382575,0.0688243131658,0.0567195415053,0.0567158605409,0.055199183145,0.0577277656776,0.0514263946089,0.0511674067496,0.0489061342569,0.0511715678398,0.0433478378867,0.0403335280851,0.0421007911388,0.0360480052037,0.0420908285284,0.0380616168049,0.037812095426,0.039062711109,0.0350399170671,0.0350424577328,0.0315052669273,0.0375564764493,0.0337895134396,0.0269754200287,0.0297520635576,0.0284871921393,0.0277350190569,0.0287422309639,0.0274815566453,0.0211727116183,0.0224359706141,0.0219306021988,0.0186556000914,0.0196619197646,0.01587585575,0.0168843239862,0.0136101460947,0.0156320478681,0.0148670474247,0.0128536798875,0.0136103141388,0.0138685377979,0.0128584131276,0.0103387449308,0.00957886182823,0.00882363594592,0.0100810174017,0.00907349741397,0.0100833980255,0.00756333372484,0.00579558654437,0.00453950943034,0.00353109080714,0.00655416530518,0.0070585334584,0.00504006458449,0.00479108334708,0.00529487534934,0.00352733262243,0.0037818717161,0.00604699641812,0.00378154042929,0.00302329215507,0.00277305018733,0.00327762319422,0.00378171327458,0.00227029365638,0.00377859125656,0.00201669800992,0.0030248585655,0.00126065631386,0.00327871508032,0.00302570798806,0.00100792769457,0.0020157613645,0.002017457809,0.000756116515664,0.00227030285879,0.00126117605004,0.00100802772078,0.000252300987242,0.00126007616185,0.00151270595524,0.00125995252945,0.00201806636845,0.000253126323495,0.000504488984879,0.00176464396738,0.000755854847102,0.0,0.00125919313048,0.000755864449618,0.00100762721584,0.000756502616829,0.000755615184307,0.000756903121769,0.0,0.000756331772065,0.000757081568525,0.000503826011169,0.000251545669336,0.000252215804923,0.00100759880839,0.000251640054066,0.0012595588263,0.000252050601637,0.00126095439196,0.000252077688734,0.000503636361477,0.000503856019031,0.00025212614143,0.000251452404899,0.000504366952905,0.000252300987242,0.0,0.000504435370831,0.000504054871134,0.0,0.0,0.0,0.0,0.00050530839958,0.000252143105875,0.000757478072416,0.000252081529741,0.0,0.0,0.0,0.000251610006193,0.0,0.000252181916043,0.000252181916043,0.0,0.000252013191835,0.0,0.0,0.0,0.000251640054066,0.0,0.000504195307931,0.000252013191835,0.000251953496194,0.0,0.000251640054066,0.0,0.0,0.0,0.0,0.0,0.0,0.000251953496194,0.000503721583807,0.0,0.000251870154357,0.0,0.000252081529741,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000253126323495,0.0,0.0,0.000252319952211,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0
,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_14
y12_PT_14_weights = numpy.array([0.0,0.0,0.0146096772693,0.021758495,0.0343474538677,0.0383593871185,0.0355002019515,0.0417763503913,0.0403583602722,0.0409264861167,0.0429340873848,0.0461088235928,0.0392320564094,0.0463873927156,0.0552659024475,0.0449649635666,0.0475472592438,0.0492385196436,0.0486970179809,0.054106865738,0.0466707607897,0.0492563657436,0.051256168716,0.0449456477878,0.0415010205606,0.0363787299621,0.0415140777071,0.0377901715122,0.0335132161781,0.0352126647885,0.0326160622379,0.034076663045,0.0280533793454,0.0303553962733,0.0366471912938,0.0326401369767,0.0291897410102,0.0257441539927,0.0246234189129,0.0263335351921,0.0294989234429,0.0237705752894,0.0249113959797,0.0223331794112,0.024065270888,0.0208941438913,0.0206119955507,0.0211802813601,0.0208974231746,0.022605149976,0.0191845974876,0.0177500809801,0.020885125862,0.0183180168661,0.0183229457889,0.0143211603204,0.0120221327392,0.0203195994495,0.0160370353411,0.0140244951523,0.0140250250365,0.0111656599088,0.014604828329,0.0125924181117,0.0100240193976,0.0111681693604,0.00888281279153,0.0114649844958,0.0100214999482,0.00944925600368,0.00886616742913,0.00601225710938,0.00830857728228,0.00972956374653,0.00600798804232,0.00600687828484,0.00658325232675,0.00601125432852,0.00429402660346,0.00458683961347,0.00429366068343,0.00572721440128,0.00515682505151,0.00544244063442,0.00429459148001,0.0031441848845,0.00515571429425,0.00343500832931,0.00286189257535,0.00487071757611,0.00314479575101,0.00172151578797,0.00343789169919,0.002867243406,0.00314971267649,0.0022891007506,0.00200185852318,0.00114590158001,0.00257035928567,0.00258186677088,0.00114172249329,0.00171778360359,0.00114819807815,0.00143318979739,0.000573314010631,0.00171753565777,0.000858264438894,0.000858423504132,0.0014308313128,0.000574578134375,0.000857224466165,0.00114484081183,0.000860916359355,0.000571130587786,0.000574545841432,0.000859721620448,0.00172151178885,0.000284795062216,0.0005703132664,0.00143078132372,0.0011466594144,0.00028633552557,0.000572674650354,0.000571115291129,0.000287272920716,0.000855618217187,0.000862215175518,0.000573303212991,0.000287149047787,0.00114642746509,0.00114197043911,0.0,0.00143326278144,0.000287149047787,0.00114607154287,0.00114405398378,0.00028633552557,0.000858730337078,0.0,0.0,0.0,0.000574128632607,0.00057373261915,0.000286470596052,0.0,0.000286459698434,0.0,0.0,0.0,0.0,0.000573484573357,0.0,0.0,0.000574942254802,0.000286459698434,0.000287793107037,0.0,0.0,0.00114748523393,0.0,0.0,0.0,0.0,0.00028633552557,0.000286459698434,0.000284655592695,0.0,0.0,0.0,0.0,0.0,0.000287793107037,0.0,0.0,0.0,0.0,0.000286785027338,0.000572226448302,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000287793107037,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000286204054301,0.0,0.0,0.0,0.0,0.0,0.0,0.000287296715516,0.00028633552557,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000286459698434,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000284093715485,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_15
y12_PT_15_weights = numpy.array([0.0,0.0,0.00317549164396,0.00423287348577,0.0058513110604,0.00669466788454,0.00727750188691,0.00727704925564,0.00799003570211,0.00841980908908,0.00868228493094,0.00999928684661,0.00900144026577,0.00835713223184,0.00921568992266,0.00829163900229,0.00872287086779,0.00762454852798,0.00757950333496,0.00844561326227,0.00777300320113,0.00774927777895,0.00729859115107,0.00770839008793,0.0060869601177,0.0075572238182,0.00647858674743,0.00617587701527,0.00574467867801,0.00643529759609,0.00544068649059,0.00596185786504,0.00548180049725,0.00548263451226,0.00541753523955,0.00464266823159,0.00483876234549,0.00421193928972,0.00444916836542,0.00477296317072,0.00449073081231,0.00468718116377,0.00451358030918,0.00423060194738,0.00380176525566,0.00414543224995,0.0039289986472,0.00323784034353,0.00339069937037,0.00315027631053,0.00328301923042,0.00302356973149,0.00282978403705,0.00317452938342,0.00349609372243,0.00301955765824,0.00267927617818,0.00267722089695,0.00259035006035,0.00259169831478,0.00203059019407,0.002743114789,0.00200845359145,0.00267828458043,0.00196567448848,0.00177100029563,0.00168455820142,0.00172815539349,0.00129626805086,0.00172606574581,0.00164118020023,0.00138216279652,0.00146845610916,0.00183473119697,0.00125315618009,0.00144685176738,0.00112164039386,0.00114485576766,0.000885574748154,0.000862246635633,0.00110174724972,0.00110143376066,0.000907472462054,0.00116635030446,0.000842353910599,0.00095044812434,0.000885555469415,0.000691143215958,0.000669995277367,0.000885505177052,0.000734133546849,0.000583326029316,0.000626445024836,0.00051837637638,0.000583270288614,0.00058274724804,0.000367405869681,0.000539808885924,0.000604987369111,0.000539761946385,0.000475160568062,0.000432123716735,0.00047555117208,0.000388728364681,0.000431860520036,0.000496689890404,0.000323978707708,0.000172875010068,0.000345699392491,0.000280493583786,0.000410434883782,0.000410595735522,0.000388662230224,0.000410434841871,0.000324068898679,0.000237658362924,0.000172815371708,0.000367094895237,0.000216033820375,0.000216067055245,0.00019429624574,0.00015107327076,0.000151234457783,0.000259339484382,0.000129638288509,0.000194544857654,0.000151238816455,0.000215856958899,0.000215983444192,0.00012958849907,0.000194445027314,8.64393281322e-05,0.000172799403883,6.47281988982e-05,0.000172721283079,0.00017289877321,0.000107937804492,0.000129615656946,0.000151290282306,0.00010798189413,4.32334944659e-05,0.000151126748306,4.32964437401e-05,0.000129546882139,8.642516245e-05,2.15567316799e-05,4.32323209774e-05,4.32445587857e-05,2.15987551401e-05,8.47219696708e-05,4.3196361938e-05,4.31863034654e-05,2.15598456154e-05,2.16090064001e-05,6.47499922555e-05,6.4696975723e-05,0.0,4.31856329005e-05,0.0,0.0,2.16097649766e-05,4.31772927504e-05,0.0,4.31923804592e-05,0.0,8.6318584551e-05,6.48849015189e-05,2.16145930434e-05,0.0,0.0,4.3143010123e-05,4.31813161394e-05,0.0,2.16145930434e-05,0.0,0.0,0.0,0.0,2.15987551401e-05,0.0,2.15987551401e-05,2.15263383286e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.16755767244e-05,4.32703336217e-05,0.0,0.0,0.0,0.0,0.0,2.15916555349e-05,2.15937845783e-05,0.0,0.0,0.0,2.15777916069e-05,0.0,4.32015169052e-05,0.0,0.0,0.0,4.31094818811e-05,0.0,0.0,0.0,8.63439821942e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.16557196231e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.15793716253e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,
0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.15567316799e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y12_PT_16
y12_PT_16_weights = numpy.array([0.0,0.0,0.000511549513695,0.00087732258465,0.00164763595878,0.00187282009597,0.00192695411126,0.00193120166363,0.00221326884388,0.00212201330013,0.00227029297696,0.00237976101535,0.00183859017047,0.00238154023484,0.00201331902915,0.00212286429576,0.00212401380818,0.00175817182589,0.00212404945198,0.00164572307506,0.0015879132933,0.00204177763,0.00175832628234,0.0016780876419,0.0018141088232,0.00193015314196,0.00127422100647,0.00170438830793,0.00175650250811,0.00198530152028,0.00141862664821,0.00133417446553,0.00164450078989,0.00144215170214,0.00141838130008,0.00138724421648,0.00121948921501,0.00104787280231,0.00118796108926,0.00132856784492,0.00107718848794,0.00116141581767,0.00116198656396,0.000992617938144,0.000907263076322,0.000935620092354,0.00113361514189,0.00102121228245,0.00079418313327,0.00082259703086,0.000849617553055,0.00102260476675,0.000765243192657,0.000652927511999,0.00056749764969,0.00084954492882,0.000650941112947,0.000822569258402,0.000538282212236,0.000597039376394,0.000566698783111,0.000677542672022,0.000622389392679,0.000312193613725,0.000453443145847,0.000283891548545,0.000454086961914,0.00056872186557,0.000565829965581,0.000455122711222,0.000397122383667,0.000368220760135,0.000255580275385,0.000310295433068,0.000423606912278,0.0002818836147,0.0003408721665,0.00028366521044,0.000340693799004,0.000308737205116,0.000227247764463,0.000198494953779,0.000142024353991,0.000141051397171,0.000283144662502,0.000170569821743,5.68384437634e-05,0.000255569285215,0.000227300933126,0.000170233730449,0.000198646439913,0.000255454631004,0.000168957237003,0.000255563938645,0.000170285265438,0.000255431016989,0.000170457246754,8.50148794132e-05,0.000113645209103,8.45933172667e-05,0.000111834519114,0.000113717907595,0.00025524210487,5.67908889988e-05,2.84032787087e-05,8.51333801836e-05,8.52839603704e-05,5.69095976913e-05,0.000142074849369,5.67590175044e-05,8.51662615855e-05,0.000115179154719,0.000113431405733,2.83557387957e-05,8.51149642223e-05,0.000142099800026,8.53235695388e-05,5.68105376413e-05,8.50226319389e-05,0.000197466333231,2.84032787087e-05,0.000170307542811,8.52138163501e-05,8.49181065069e-05,2.98963973264e-05,8.49032994799e-05,2.83557387957e-05,2.84139718475e-05,5.65611053267e-05,0.000113448069207,0.0,2.84032787087e-05,8.51044493024e-05,2.8435150203e-05,0.000113262810577,2.82222527794e-05,2.83557387957e-05,5.67590175044e-05,2.8435150203e-05,0.0,5.68384437634e-05,5.66601653766e-05,2.83557387957e-05,2.84139718475e-05,5.66189076827e-05,5.68105376413e-05,0.0,5.62832916395e-05,2.83557387957e-05,0.0,2.83743626792e-05,0.0,0.0,2.8435150203e-05,5.68580775544e-05,2.84547988456e-05,0.0,0.0,0.0,0.0,2.84139718475e-05,5.67487105067e-05,0.0,0.0,5.67114627398e-05,0.0,2.70068735911e-05,2.83557387957e-05,0.0,2.84547988456e-05,0.0,2.83557387957e-05,0.0,0.0,0.0,0.0,2.84032787087e-05,0.0,0.0,2.84032787087e-05,2.83743626792e-05,0.0,2.84139718475e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.83743626792e-05,0.0,2.84547988456e-05,0.0,2.83743626792e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.83743626792e-05,0.0,0.0,0.0,2.83743626792e-05,2.84547988456e-05,2.83557387957e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.84547988456e-05,0.0,0.0,2.84139718475e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.84139718475e-05,0.0,0.0,0.0,0.0,0.0,0.0,2.8435150203e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating a new Canvas
fig = plt.figure(figsize=(12,6),dpi=80)
frame = gridspec.GridSpec(1,1,right=0.7)
pad = fig.add_subplot(frame[0])
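# gridspec right=0.7 leaves the right 30% of the canvas free, so the legend
# anchored outside the axes (see plt.legend below) does not overlap the plot.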
# Creating a new Stack
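# The stack is emulated with overlapping step histograms: each call below is
# weighted with the running total of its own process plus every process drawn
# after it, so the largest cumulative sum comes first and the signal-only
# histogram last. Note that rwidth is ignored by matplotlib for histtype="step".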
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights+y12_PT_12_weights+y12_PT_13_weights+y12_PT_14_weights+y12_PT_15_weights+y12_PT_16_weights,\
label="$bg\_vbf\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#e5e5e5", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights+y12_PT_12_weights+y12_PT_13_weights+y12_PT_14_weights+y12_PT_15_weights,\
label="$bg\_vbf\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#f2f2f2", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights+y12_PT_12_weights+y12_PT_13_weights+y12_PT_14_weights,\
label="$bg\_vbf\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights+y12_PT_12_weights+y12_PT_13_weights,\
label="$bg\_vbf\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights+y12_PT_12_weights,\
label="$bg\_vbf\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#c1bfa8", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights,\
label="$bg\_vbf\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#bab5a3", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights,\
label="$bg\_vbf\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b2a596", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights,\
label="$bg\_vbf\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b7a39b", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights,\
label="$bg\_dip\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ad998c", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights,\
label="$bg\_dip\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#9b8e82", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights,\
label="$bg\_dip\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#876656", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights,\
label="$bg\_dip\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#afcec6", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights,\
label="$bg\_dip\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#84c1a3", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights,\
label="$bg\_dip\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#89a8a0", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights,\
label="$bg\_dip\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#829e8c", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights+y12_PT_1_weights,\
label="$bg\_dip\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#adbcc6", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y12_PT_0_weights,\
label="$signal$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#7a8e99", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
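# The seventeen calls above could equally be generated in a loop; a minimal
# sketch, assuming hypothetical lists all_weights (y12_PT_0..16 in order),
# labels and colors gathered beforehand (these names are not in the original):
#
#   running = sum(all_weights)                  # total of every process
#   for w, label, color in zip(reversed(all_weights), labels, colors):
#       pad.hist(x=xData, bins=xBinning, weights=running, label=label,
#                histtype="step", color=None, edgecolor=color)
#       running = running - w                   # peel off the process just drawn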
# Axis
plt.rc('text',usetex=False)
plt.xlabel(r"p_{T} [ a_{2} ] ( GeV ) ",\
fontsize=16,color="black")
plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 40.0\ \mathrm{fb}^{-1})$ ",\
fontsize=16,color="black")
# Boundary of y-axis
ymax=(y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights+y12_PT_12_weights+y12_PT_13_weights+y12_PT_14_weights+y12_PT_15_weights+y12_PT_16_weights).max()*1.1
ymin=0 # linear scale
#ymin=min([x for x in (y12_PT_0_weights+y12_PT_1_weights+y12_PT_2_weights+y12_PT_3_weights+y12_PT_4_weights+y12_PT_5_weights+y12_PT_6_weights+y12_PT_7_weights+y12_PT_8_weights+y12_PT_9_weights+y12_PT_10_weights+y12_PT_11_weights+y12_PT_12_weights+y12_PT_13_weights+y12_PT_14_weights+y12_PT_15_weights+y12_PT_16_weights) if x])/100. # log scale
plt.gca().set_ylim(ymin,ymax)
# Log/Linear scale for X-axis
plt.gca().set_xscale("linear")
#plt.gca().set_xscale("log", nonpositive="clip")
# Log/Linear scale for Y-axis
plt.gca().set_yscale("linear")
#plt.gca().set_yscale("log", nonpositive="clip")
# Legend
plt.legend(bbox_to_anchor=(1.05,1), loc=2, borderaxespad=0.)
# Saving the image
plt.savefig('../../HTML/MadAnalysis5job_0/selection_11.png')
plt.savefig('../../PDF/MadAnalysis5job_0/selection_11.png')
plt.savefig('../../DVI/MadAnalysis5job_0/selection_11.eps')
# Running!
if __name__ == '__main__':
selection_11()
| 339.489691 | 6,280 | 0.732361 | 16,293 | 65,861 | 2.915547 | 0.149021 | 0.400185 | 0.585184 | 0.761552 | 0.363514 | 0.352694 | 0.351009 | 0.336779 | 0.329306 | 0.322906 | 0 | 0.638024 | 0.025129 | 65,861 | 193 | 6,281 | 341.248705 | 0.101828 | 0.019997 | 0 | 0.185841 | 0 | 0.00885 | 0.016153 | 0.003147 | 0 | 0 | 0 | 0 | 0 | 1 | 0.00885 | false | 0 | 0.035398 | 0 | 0.044248 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
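The script above was generated by MadAnalysis5 and emulates a stacked plot by drawing cumulative weight sums as overlapping step histograms. On current Matplotlib the same effect can come from a single call with `stacked=True`; a minimal sketch with made-up data (the variable names below are illustrative, not from the script), noting that `density` is the current name of the `normed` keyword removed in Matplotlib 3.1:

import numpy as np
import matplotlib.pyplot as plt

rng = np.random.default_rng(42)
x_data = rng.normal(loc=300.0, scale=80.0, size=1000)  # stand-in p_T values
weights = [np.full(1000, w) for w in (0.2, 0.5, 1.0)]  # one array per process

# One call stacks all components instead of re-drawing cumulative sums.
plt.hist([x_data] * 3, bins=50, weights=weights, stacked=True,
         histtype="step", density=False)
plt.show()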
043694c63dff8d8022675955a22affbc3ca7e85c | 24,761 | py | Python | bridge/marks/migrations/0001_initial.py | mutilin/klever | 52877601e252279375091e049d096d8b302717a6 | [
"Apache-2.0"
] | null | null | null | bridge/marks/migrations/0001_initial.py | mutilin/klever | 52877601e252279375091e049d096d8b302717a6 | [
"Apache-2.0"
] | 63 | 2019-01-24T15:17:55.000Z | 2020-02-20T13:15:18.000Z | bridge/marks/migrations/0001_initial.py | mutilin/klever | 52877601e252279375091e049d096d8b302717a6 | [
"Apache-2.0"
] | 1 | 2019-10-11T07:27:10.000Z | 2019-10-11T07:27:10.000Z | #
# Copyright (c) 2018 ISP RAS (http://www.ispras.ru)
# Ivannikov Institute for System Programming of the Russian Academy of Sciences
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
from django.db.models.deletion import CASCADE, SET_NULL, PROTECT
status_choices = [('0', 'Unreported'), ('1', 'Reported'), ('2', 'Fixed'), ('3', 'Rejected')]
class Migration(migrations.Migration):
initial = True
dependencies = [
('jobs', '0001_initial'), ('reports', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ConvertedTraces',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('hash_sum', models.CharField(db_index=True, max_length=255)),
('file', models.FileField(upload_to='Error-traces')),
],
options={'db_table': 'file'},
),
migrations.CreateModel(
name='MarkUnsafeConvert',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=30)),
('description', models.CharField(default='', max_length=1000)),
],
options={'db_table': 'mark_unsafe_convert'},
),
migrations.CreateModel(
name='MarkUnsafeCompare',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=30)),
('description', models.CharField(default='', max_length=1000)),
('convert', models.ForeignKey(on_delete=CASCADE, to='marks.MarkUnsafeConvert')),
],
options={'db_table': 'mark_unsafe_compare'},
),
migrations.CreateModel(
name='ErrorTraceConvertionCache',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('unsafe', models.ForeignKey(on_delete=CASCADE, to='reports.ReportUnsafe')),
('function', models.ForeignKey(on_delete=CASCADE, to='marks.MarkUnsafeConvert')),
('converted', models.ForeignKey(on_delete=CASCADE, to='marks.ConvertedTraces')),
],
options={'db_table': 'cache_error_trace_converted'},
),
migrations.CreateModel(
name='MarkAssociationsChanges',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('identifier', models.CharField(max_length=255, unique=True)),
('user', models.ForeignKey(on_delete=CASCADE, to=settings.AUTH_USER_MODEL)),
('table_data', models.TextField()),
],
options={'db_table': 'cache_mark_associations_changes'},
),
migrations.CreateModel(
name='MarkSafe',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('identifier', models.CharField(max_length=255, unique=True)),
('version', models.PositiveSmallIntegerField(default=1)),
('author', models.ForeignKey(null=True, on_delete=SET_NULL, related_name='+',
to=settings.AUTH_USER_MODEL)),
('change_date', models.DateTimeField(auto_now=True)),
('is_modifiable', models.BooleanField(default=True)),
('type', models.CharField(choices=[('0', 'Created'), ('1', 'Preset'), ('2', 'Uploaded')],
default='0', max_length=1)),
('job', models.ForeignKey(null=True, on_delete=SET_NULL, related_name='+', to='jobs.Job')),
('format', models.PositiveSmallIntegerField(default=1)),
('status', models.CharField(choices=status_choices, default='0', max_length=1)),
('verdict', models.CharField(
choices=[('0', 'Unknown'), ('1', 'Incorrect proof'), ('2', 'Missed target bug')],
default='0', max_length=1)),
('description', models.TextField(default='')),
],
options={'db_table': 'mark_safe'},
),
migrations.CreateModel(
name='MarkSafeHistory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mark', models.ForeignKey(on_delete=CASCADE, related_name='versions', to='marks.MarkSafe')),
('version', models.PositiveSmallIntegerField()),
('author', models.ForeignKey(null=True, on_delete=SET_NULL, related_name='+',
to=settings.AUTH_USER_MODEL)),
('change_date', models.DateTimeField()),
('status', models.CharField(choices=status_choices, default='0', max_length=1)),
('verdict', models.CharField(
choices=[('0', 'Unknown'), ('1', 'Incorrect proof'), ('2', 'Missed target bug')], max_length=1)),
('description', models.TextField()),
('comment', models.TextField()),
],
options={'db_table': 'mark_safe_history'},
),
migrations.CreateModel(
name='MarkSafeAttr',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mark', models.ForeignKey(on_delete=CASCADE, related_name='attrs', to='marks.MarkSafeHistory')),
('attr', models.ForeignKey(on_delete=CASCADE, to='reports.Attr')),
('is_compare', models.BooleanField(default=True)),
],
options={'db_table': 'mark_safe_attr'},
),
migrations.CreateModel(
name='MarkSafeReport',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mark', models.ForeignKey(on_delete=CASCADE, related_name='markreport_set', to='marks.MarkSafe')),
('report', models.ForeignKey(on_delete=CASCADE, related_name='markreport_set',
to='reports.ReportSafe')),
('author', models.ForeignKey(null=True, on_delete=SET_NULL, to=settings.AUTH_USER_MODEL)),
('type', models.CharField(choices=[('0', 'Automatic'), ('1', 'Confirmed'), ('2', 'Unconfirmed')],
default='0', max_length=1)),
],
options={'db_table': 'cache_mark_safe_report'},
),
migrations.CreateModel(
name='SafeAssociationLike',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('association', models.ForeignKey(on_delete=CASCADE, to='marks.MarkSafeReport')),
('author', models.ForeignKey(on_delete=CASCADE, to=settings.AUTH_USER_MODEL)),
('dislike', models.BooleanField(default=False)),
],
options={'db_table': 'mark_safe_association_like'},
),
migrations.CreateModel(
name='UnknownProblem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=15)),
],
options={'db_table': 'cache_mark_unknown_problem'},
),
migrations.CreateModel(
name='ComponentMarkUnknownProblem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('report', models.ForeignKey(on_delete=CASCADE, related_name='mark_unknowns_cache',
to='reports.ReportComponent')),
('component', models.ForeignKey(on_delete=PROTECT, related_name='+', to='reports.Component')),
('problem', models.ForeignKey(null=True, on_delete=PROTECT, to='marks.UnknownProblem')),
('number', models.PositiveIntegerField(default=0)),
],
options={'db_table': 'cache_report_component_mark_unknown_problem'},
),
migrations.CreateModel(
name='MarkUnknown',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('identifier', models.CharField(max_length=255, unique=True)),
('version', models.PositiveSmallIntegerField(default=1)),
('author', models.ForeignKey(null=True, on_delete=SET_NULL, related_name='+',
to=settings.AUTH_USER_MODEL)),
('change_date', models.DateTimeField(auto_now=True)),
('is_modifiable', models.BooleanField(default=True)),
('type', models.CharField(choices=[('0', 'Created'), ('1', 'Preset'), ('2', 'Uploaded')],
default='0', max_length=1)),
('job', models.ForeignKey(null=True, on_delete=SET_NULL, related_name='+', to='jobs.Job')),
('format', models.PositiveSmallIntegerField(default=1)),
('status', models.CharField(choices=status_choices, default='0', max_length=1)),
('component', models.ForeignKey(on_delete=PROTECT, to='reports.Component')),
('problem_pattern', models.CharField(max_length=15)),
('function', models.TextField()),
('is_regexp', models.BooleanField(default=True)),
('link', models.URLField(null=True)),
('description', models.TextField(default='')),
],
options={'db_table': 'mark_unknown'},
),
migrations.CreateModel(
name='MarkUnknownHistory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mark', models.ForeignKey(on_delete=CASCADE, related_name='versions', to='marks.MarkUnknown')),
('version', models.PositiveSmallIntegerField()),
('status', models.CharField(choices=status_choices, default='0', max_length=1)),
('author', models.ForeignKey(null=True, on_delete=SET_NULL, related_name='+',
to=settings.AUTH_USER_MODEL)),
('change_date', models.DateTimeField()),
('problem_pattern', models.CharField(max_length=100)),
('function', models.TextField()),
('is_regexp', models.BooleanField(default=True)),
('link', models.URLField(null=True)),
('description', models.TextField()),
('comment', models.TextField()),
],
options={'db_table': 'mark_unknown_history'},
),
migrations.CreateModel(
name='MarkUnknownReport',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mark', models.ForeignKey(on_delete=CASCADE, related_name='markreport_set', to='marks.MarkUnknown')),
('report', models.ForeignKey(on_delete=CASCADE, related_name='markreport_set',
to='reports.ReportUnknown')),
('author', models.ForeignKey(null=True, on_delete=SET_NULL, to=settings.AUTH_USER_MODEL)),
('problem', models.ForeignKey(on_delete=PROTECT, to='marks.UnknownProblem')),
('type', models.CharField(choices=[('0', 'Automatic'), ('1', 'Confirmed'), ('2', 'Unconfirmed')],
default='0', max_length=1)),
],
options={'db_table': 'cache_mark_unknown_report'},
),
migrations.CreateModel(
name='UnknownAssociationLike',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('association', models.ForeignKey(on_delete=CASCADE, to='marks.MarkUnknownReport')),
('author', models.ForeignKey(on_delete=CASCADE, to=settings.AUTH_USER_MODEL)),
('dislike', models.BooleanField(default=False)),
],
options={'db_table': 'mark_unknown_association_like'},
),
migrations.CreateModel(
name='MarkUnsafe',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('identifier', models.CharField(max_length=255, unique=True)),
('version', models.PositiveSmallIntegerField(default=1)),
('author', models.ForeignKey(null=True, on_delete=SET_NULL, related_name='+',
to=settings.AUTH_USER_MODEL)),
('change_date', models.DateTimeField(auto_now=True)),
('is_modifiable', models.BooleanField(default=True)),
('type', models.CharField(choices=[('0', 'Created'), ('1', 'Preset'), ('2', 'Uploaded')],
default='0', max_length=1)),
('job', models.ForeignKey(null=True, on_delete=SET_NULL, related_name='+', to='jobs.Job')),
('format', models.PositiveSmallIntegerField(default=1)),
('status', models.CharField(choices=status_choices, default='0', max_length=1)),
('verdict', models.CharField(
choices=[('0', 'Unknown'), ('1', 'Bug'), ('2', 'Target bug'), ('3', 'False positive')],
default='0', max_length=1)),
('function', models.ForeignKey(on_delete=CASCADE, to='marks.MarkUnsafeCompare')),
('description', models.TextField(default='')),
],
options={'db_table': 'mark_unsafe'},
),
migrations.CreateModel(
name='MarkUnsafeHistory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mark', models.ForeignKey(on_delete=CASCADE, related_name='versions', to='marks.MarkUnsafe')),
('version', models.PositiveSmallIntegerField()),
('author', models.ForeignKey(null=True, on_delete=SET_NULL, related_name='+',
to=settings.AUTH_USER_MODEL)),
('change_date', models.DateTimeField()),
('status', models.CharField(choices=status_choices, default='0', max_length=1)),
('verdict', models.CharField(
choices=[('0', 'Unknown'), ('1', 'Bug'), ('2', 'Target bug'), ('3', 'False positive')],
max_length=1)),
('error_trace', models.ForeignKey(on_delete=CASCADE, to='marks.ConvertedTraces')),
('function', models.ForeignKey(on_delete=CASCADE, to='marks.MarkUnsafeCompare')),
('description', models.TextField()),
('comment', models.TextField()),
],
options={'db_table': 'mark_unsafe_history'},
),
migrations.CreateModel(
name='MarkUnsafeAttr',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mark', models.ForeignKey(on_delete=CASCADE, related_name='attrs', to='marks.MarkUnsafeHistory')),
('attr', models.ForeignKey(on_delete=CASCADE, to='reports.Attr')),
('is_compare', models.BooleanField(default=True)),
],
options={'db_table': 'mark_unsafe_attr'},
),
migrations.CreateModel(
name='MarkUnsafeReport',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mark', models.ForeignKey(on_delete=CASCADE, related_name='markreport_set', to='marks.MarkUnsafe')),
('report', models.ForeignKey(on_delete=CASCADE, related_name='markreport_set',
to='reports.ReportUnsafe')),
('author', models.ForeignKey(null=True, on_delete=SET_NULL, to=settings.AUTH_USER_MODEL)),
('type', models.CharField(choices=[('0', 'Automatic'), ('1', 'Confirmed'), ('2', 'Unconfirmed')],
default='0', max_length=1)),
('result', models.FloatField()),
('error', models.TextField(null=True)),
],
options={'db_table': 'cache_mark_unsafe_report'},
),
migrations.CreateModel(
name='UnsafeAssociationLike',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('association', models.ForeignKey(on_delete=CASCADE, to='marks.MarkUnsafeReport')),
('author', models.ForeignKey(on_delete=CASCADE, to=settings.AUTH_USER_MODEL)),
('dislike', models.BooleanField(default=False)),
],
options={'db_table': 'mark_unsafe_association_like'},
),
migrations.CreateModel(
name='SafeTag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('parent', models.ForeignKey(null=True, on_delete=CASCADE, related_name='children',
to='marks.SafeTag')),
('author', models.ForeignKey(on_delete=CASCADE, to=settings.AUTH_USER_MODEL)),
('populated', models.BooleanField(default=False)),
('tag', models.CharField(db_index=True, max_length=32)),
('description', models.TextField(default='')),
],
options={'db_table': 'mark_safe_tag'},
),
migrations.CreateModel(
name='SafeTagAccess',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('user', models.ForeignKey(on_delete=CASCADE, to=settings.AUTH_USER_MODEL)),
('tag', models.ForeignKey(on_delete=CASCADE, to='marks.SafeTag')),
('modification', models.BooleanField(default=False)),
('child_creation', models.BooleanField(default=False)),
],
options={'db_table': 'marks_safe_tag_access'},
),
migrations.CreateModel(
name='MarkSafeTag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mark_version', models.ForeignKey(on_delete=CASCADE, related_name='tags', to='marks.MarkSafeHistory')),
('tag', models.ForeignKey(on_delete=CASCADE, related_name='+', to='marks.SafeTag')),
],
options={'db_table': 'cache_mark_safe_tag'},
),
migrations.CreateModel(
name='ReportSafeTag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('report', models.ForeignKey(on_delete=CASCADE, related_name='safe_tags',
to='reports.ReportComponent')),
('tag', models.ForeignKey(on_delete=CASCADE, related_name='+', to='marks.SafeTag')),
('number', models.IntegerField(default=0)),
],
options={'db_table': 'cache_report_safe_tag'},
),
migrations.CreateModel(
name='SafeReportTag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('report', models.ForeignKey(on_delete=CASCADE, related_name='tags', to='reports.ReportSafe')),
('tag', models.ForeignKey(on_delete=CASCADE, to='marks.SafeTag')),
('number', models.PositiveIntegerField(default=0)),
],
options={'db_table': 'cache_safe_report_safe_tag'},
),
migrations.CreateModel(
name='UnsafeTag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('parent', models.ForeignKey(null=True, on_delete=CASCADE, related_name='children',
to='marks.UnsafeTag')),
('author', models.ForeignKey(on_delete=CASCADE, to=settings.AUTH_USER_MODEL)),
('populated', models.BooleanField(default=False)),
('tag', models.CharField(db_index=True, max_length=32)),
('description', models.TextField(default='')),
],
options={'db_table': 'mark_unsafe_tag'},
),
migrations.CreateModel(
name='UnsafeTagAccess',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('user', models.ForeignKey(on_delete=CASCADE, to=settings.AUTH_USER_MODEL)),
('tag', models.ForeignKey(on_delete=CASCADE, to='marks.UnsafeTag')),
('modification', models.BooleanField(default=False)),
('child_creation', models.BooleanField(default=False)),
],
options={'db_table': 'marks_unsafe_tag_access'},
),
migrations.CreateModel(
name='MarkUnsafeTag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mark_version', models.ForeignKey(on_delete=CASCADE, related_name='tags',
to='marks.MarkUnsafeHistory')),
('tag', models.ForeignKey(on_delete=CASCADE, related_name='+', to='marks.UnsafeTag')),
],
options={'db_table': 'cache_mark_unsafe_tag'},
),
migrations.CreateModel(
name='ReportUnsafeTag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('report', models.ForeignKey(on_delete=CASCADE, related_name='unsafe_tags',
to='reports.ReportComponent')),
('tag', models.ForeignKey(on_delete=CASCADE, related_name='+', to='marks.UnsafeTag')),
('number', models.IntegerField(default=0)),
],
options={'db_table': 'cache_report_unsafe_tag'},
),
migrations.CreateModel(
name='UnsafeReportTag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('report', models.ForeignKey(on_delete=CASCADE, related_name='tags', to='reports.ReportUnsafe')),
('tag', models.ForeignKey(on_delete=CASCADE, to='marks.UnsafeTag')),
('number', models.PositiveIntegerField(default=0)),
],
options={'db_table': 'cache_unsafe_report_unsafe_tag'},
),
migrations.AlterIndexTogether(name='markunknown', index_together={('component', 'problem_pattern')}),
]
| 57.184758 | 120 | 0.566576 | 2,317 | 24,761 | 5.865343 | 0.109625 | 0.07535 | 0.064901 | 0.086534 | 0.810228 | 0.781898 | 0.732377 | 0.726416 | 0.708094 | 0.679617 | 0 | 0.007395 | 0.284601 | 24,761 | 432 | 121 | 57.31713 | 0.759795 | 0.026211 | 0 | 0.705882 | 0 | 0 | 0.168292 | 0.038224 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.009804 | 0 | 0.019608 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
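The migration above only declares schema state; once it is applied, the created tables back ordinary ORM calls. A minimal sketch, assuming the `marks` app's model classes mirror the `CreateModel` names (the project's `models.py` is not part of this dump):

# Hypothetical usage: assumes a configured Django project with the
# `marks` app installed and this migration applied.
from marks.models import UnknownProblem

# `name` is capped at max_length=15 by the migration above.
problem, created = UnknownProblem.objects.get_or_create(name="timeout")
print(problem.pk, created)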
f0a468123a8872c7e735e689dc60d30a6837fb23 | 248 | py | Python | app/admin.py | Coullence/Django_Percels_-_Couriers_dashboard | 3925ebdd995d7fd21b04e99357e05962f154178d | [
"MIT"
] | null | null | null | app/admin.py | Coullence/Django_Percels_-_Couriers_dashboard | 3925ebdd995d7fd21b04e99357e05962f154178d | [
"MIT"
] | null | null | null | app/admin.py | Coullence/Django_Percels_-_Couriers_dashboard | 3925ebdd995d7fd21b04e99357e05962f154178d | [
"MIT"
] | null | null | null | # -*- encoding: utf-8 -*-
"""
Copyright (c) 2019 - present AppSeed.us
"""
from django.contrib import admin
from .models import itemModel, stationModel, paymentModel, smsModel
admin.site.register([itemModel, stationModel, paymentModel, smsModel])
| 24.8 | 70 | 0.745968 | 28 | 248 | 6.607143 | 0.75 | 0.227027 | 0.356757 | 0.443243 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.022936 | 0.120968 | 248 | 9 | 71 | 27.555556 | 0.825688 | 0.258065 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
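`admin.site.register` also accepts a `ModelAdmin` subclass per model, the more common pattern once list columns or filters are needed; a sketch of that style (only `id` is assumed to exist, since the models module is not shown):

from django.contrib import admin
from .models import itemModel


@admin.register(itemModel)
class ItemModelAdmin(admin.ModelAdmin):
    # "id" always exists; extend with real itemModel fields as needed.
    list_display = ("id",)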
f0bfe16488772fd61357fe9f54544acc24766c57 | 16,616 | py | Python | tests/modules/test_marshal.py | ruby-compiler-survey/topaz | bf4a56adbe03ae9ab4984729c733fcbc64a164c4 | [
"BSD-3-Clause"
] | 241 | 2015-01-02T18:49:09.000Z | 2022-03-15T15:08:45.000Z | tests/modules/test_marshal.py | ruby-compiler-survey/topaz | bf4a56adbe03ae9ab4984729c733fcbc64a164c4 | [
"BSD-3-Clause"
] | 16 | 2015-05-04T21:31:08.000Z | 2020-06-04T22:49:36.000Z | tests/modules/test_marshal.py | ruby-compiler-survey/topaz | bf4a56adbe03ae9ab4984729c733fcbc64a164c4 | [
"BSD-3-Clause"
] | 24 | 2015-02-15T05:35:11.000Z | 2022-03-22T13:29:04.000Z | from ..base import BaseTopazTest
class TestMarshal(BaseTopazTest):
def test_version_constants(self, space):
w_res = space.execute("return Marshal::MAJOR_VERSION")
assert space.int_w(w_res) == 4
w_res = space.execute("return Marshal::MINOR_VERSION")
assert space.int_w(w_res) == 8
w_res = space.execute("return Marshal.dump('test')[0].ord")
assert space.int_w(w_res) == 4
w_res = space.execute("return Marshal.dump('test')[1].ord")
assert space.int_w(w_res) == 8
def test_dump_constants(self, space):
w_res = space.execute("return Marshal.dump(nil)")
assert space.str_w(w_res) == "\x04\b0"
w_res = space.execute("return Marshal.dump(true)")
assert space.str_w(w_res) == "\x04\bT"
w_res = space.execute("return Marshal.dump(false)")
assert space.str_w(w_res) == "\x04\bF"
def test_load_constants(self, space):
w_res = space.execute("return Marshal.load('\x04\b0')")
assert w_res == space.w_nil
w_res = space.execute("return Marshal.load('\x04\bT')")
assert w_res == space.w_true
w_res = space.execute("return Marshal.load('\x04\bF')")
assert w_res == space.w_false
def test_constants(self, space):
w_res = space.execute("return Marshal.load(Marshal.dump(nil))")
assert w_res == space.w_nil
w_res = space.execute("return Marshal.load(Marshal.dump(true))")
assert w_res == space.w_true
w_res = space.execute("return Marshal.load(Marshal.dump(false))")
assert w_res == space.w_false
def test_dump_tiny_integer(self, space):
w_res = space.execute("return Marshal.dump(5)")
assert space.str_w(w_res) == "\x04\bi\n"
w_res = space.execute("return Marshal.dump(100)")
assert space.str_w(w_res) == "\x04\bii"
w_res = space.execute("return Marshal.dump(0)")
assert space.str_w(w_res) == "\x04\bi\x00"
w_res = space.execute("return Marshal.dump(-1)")
assert space.str_w(w_res) == "\x04\bi\xFA"
w_res = space.execute("return Marshal.dump(-123)")
assert space.str_w(w_res) == "\x04\bi\x80"
w_res = space.execute("return Marshal.dump(122)")
assert space.str_w(w_res) == "\x04\bi\x7F"
def test_load_tiny_integer(self, space):
w_res = space.execute("return Marshal.load('\x04\bi\n')")
assert space.int_w(w_res) == 5
w_res = space.execute("return Marshal.load('\x04\bii')")
assert space.int_w(w_res) == 100
# w_res = space.execute('return Marshal.load("\x04\bi\x00")')
w_res = space.execute('return Marshal.load(Marshal.dump(0))')
assert space.int_w(w_res) == 0
w_res = space.execute("return Marshal.load('\x04\bi\xFA')")
assert space.int_w(w_res) == -1
w_res = space.execute("return Marshal.load('\x04\bi\x80')")
assert space.int_w(w_res) == -123
w_res = space.execute("return Marshal.load('\x04\bi\x7F')")
assert space.int_w(w_res) == 122
def test_dump_array(self, space):
w_res = space.execute("return Marshal.dump([])")
assert space.str_w(w_res) == "\x04\b[\x00"
w_res = space.execute("return Marshal.dump([nil])")
assert space.str_w(w_res) == "\x04\b[\x060"
w_res = space.execute("return Marshal.dump([nil, true, false])")
assert space.str_w(w_res) == "\x04\b[\b0TF"
w_res = space.execute("return Marshal.dump([1, 2, 3])")
assert space.str_w(w_res) == "\x04\b[\x08i\x06i\x07i\x08"
w_res = space.execute("return Marshal.dump([1, [2, 3], 4])")
assert space.str_w(w_res) == "\x04\b[\bi\x06[\ai\ai\bi\t"
w_res = space.execute("return Marshal.dump([:foo, :bar])")
assert space.str_w(w_res) == "\x04\b[\a:\bfoo:\bbar"
def test_load_array(self, space):
# w_res = space.execute("return Marshal.load('\x04\b[\x00')")
w_res = space.execute("return Marshal.load(Marshal.dump([]))")
assert self.unwrap(space, w_res) == []
w_res = space.execute("return Marshal.load('\x04\b[\x060')")
assert self.unwrap(space, w_res) == [None]
w_res = space.execute("return Marshal.load('\x04\b[\b0TF')")
assert self.unwrap(space, w_res) == [None, True, False]
w_res = space.execute("return Marshal.load('\x04\b[\x08i\x06i\x07i\x08')")
assert self.unwrap(space, w_res) == [1, 2, 3]
w_res = space.execute("return Marshal.load('\x04\b[\bi\x06[\ai\ai\bi\t')")
assert self.unwrap(space, w_res) == [1, [2, 3], 4]
w_res = space.execute("return Marshal.load('\x04\b[\a:\bfoo:\bbar')")
assert self.unwrap(space, w_res) == ["foo", "bar"]
def test_dump_symbol(self, space):
w_res = space.execute("return Marshal.dump(:abc)")
assert space.str_w(w_res) == "\x04\b:\babc"
w_res = space.execute("return Marshal.dump(('hello' * 25).to_sym)")
assert space.str_w(w_res) == "\x04\b:\x01}" + "hello" * 25
w_res = space.execute("return Marshal.dump(('hello' * 100).to_sym)")
assert space.str_w(w_res) == "\x04\b:\x02\xF4\x01" + "hello" * 100
def test_load_symbol(self, space):
w_res = space.execute("return Marshal.load('\x04\b:\babc')")
assert space.symbol_w(w_res) == "abc"
w_res = space.execute("return Marshal.load('\x04\b:\x01}' + 'hello' * 25)")
assert space.symbol_w(w_res) == "hello" * 25
def test_dump_hash(self, space):
w_res = space.execute("return Marshal.dump({})")
assert space.str_w(w_res) == "\x04\b{\x00"
w_res = space.execute("return Marshal.dump({1 => 2, 3 => 4})")
assert self.unwrap(space, w_res) == "\x04\b{\ai\x06i\ai\bi\t"
w_res = space.execute("return Marshal.dump({1 => {2 => 3}, 4 => 5})")
assert self.unwrap(space, w_res) == "\x04\b{\ai\x06{\x06i\ai\bi\ti\n"
w_res = space.execute("return Marshal.dump({1234 => {23456 => 3456789}, 4 => 5})")
assert self.unwrap(space, w_res) == "\x04\b{\ai\x02\xD2\x04{\x06i\x02\xA0[i\x03\x15\xBF4i\ti\n"
def test_load_hash(self, space):
# w_res = space.execute("return Marshal.load('\x04\b{\x00')")
w_res = space.execute("return Marshal.load(Marshal.dump({}))")
assert self.unwrap(space, w_res) == {}
w_res = space.execute("return Marshal.load('\x04\b{\ai\x06i\ai\bi\t')")
assert self.unwrap(space, w_res) == {1: 2, 3: 4}
w_res = space.execute("return Marshal.load('\x04\b{\ai\x06{\x06i\ai\bi\ti\n')")
assert self.unwrap(space, w_res) == {1: {2: 3}, 4: 5}
w_res = space.execute("return Marshal.load('\x04\b{\ai\x02\xD2\x04{\x06i\x02\xA0[i\x03\x15\xBF4i\ti\n')")
assert self.unwrap(space, w_res) == {1234: {23456: 3456789}, 4: 5}
def test_dump_integer(self, space):
w_res = space.execute("return Marshal.dump(123)")
assert space.str_w(w_res) == "\x04\bi\x01{"
w_res = space.execute("return Marshal.dump(255)")
assert space.str_w(w_res) == "\x04\bi\x01\xFF"
w_res = space.execute("return Marshal.dump(256)")
assert space.str_w(w_res) == "\x04\bi\x02\x00\x01"
w_res = space.execute("return Marshal.dump(2 ** 16 - 2)")
assert space.str_w(w_res) == "\x04\bi\x02\xFE\xFF"
w_res = space.execute("return Marshal.dump(2 ** 16 - 1)")
assert space.str_w(w_res) == "\x04\bi\x02\xFF\xFF"
w_res = space.execute("return Marshal.dump(2 ** 16)")
assert space.str_w(w_res) == "\x04\bi\x03\x00\x00\x01"
w_res = space.execute("return Marshal.dump(2 ** 16 + 1)")
assert space.str_w(w_res) == "\x04\bi\x03\x01\x00\x01"
w_res = space.execute("return Marshal.dump(2 ** 30 - 1)")
assert space.str_w(w_res) == "\x04\bi\x04\xFF\xFF\xFF?"
        # TODO: test too big numbers (they give a warning and inf)
def test_load_integer(self, space):
w_res = space.execute("return Marshal.load('\x04\bi\x01{')")
assert space.int_w(w_res) == 123
w_res = space.execute("return Marshal.load('\x04\bi\x01\xFF')")
assert space.int_w(w_res) == 255
# w_res = space.execute("return Marshal.load('\x04\bi\x02\x00\x01')")
w_res = space.execute("return Marshal.load(Marshal.dump(256))")
assert space.int_w(w_res) == 256
w_res = space.execute("return Marshal.load('\x04\bi\x02\xFE\xFF')")
assert space.int_w(w_res) == 2 ** 16 - 2
w_res = space.execute("return Marshal.load('\x04\bi\x02\xFF\xFF')")
assert space.int_w(w_res) == 2 ** 16 - 1
# w_res = space.execute("return Marshal.load('\x04\bi\x03\x00\x00\x01')")
w_res = space.execute("return Marshal.load(Marshal.dump(2 ** 16))")
assert space.int_w(w_res) == 2 ** 16
# w_res = space.execute("return Marshal.load('\x04\bi\x03\x01\x00\x01')")
w_res = space.execute("return Marshal.load(Marshal.dump(2 ** 16 + 1))")
assert space.int_w(w_res) == 2 ** 16 + 1
w_res = space.execute("return Marshal.load('\x04\bi\x04\xFF\xFF\xFF?')")
assert space.int_w(w_res) == 2 ** 30 - 1
def test_dump_negative_integer(self, space):
w_res = space.execute("return Marshal.dump(-1)")
assert space.str_w(w_res) == "\x04\bi\xFA"
w_res = space.execute("return Marshal.dump(-123)")
assert space.str_w(w_res) == "\x04\bi\x80"
w_res = space.execute("return Marshal.dump(-124)")
assert space.str_w(w_res) == "\x04\bi\xFF\x84"
w_res = space.execute("return Marshal.dump(-256)")
assert space.str_w(w_res) == "\x04\bi\xFF\x00"
w_res = space.execute("return Marshal.dump(-257)")
assert space.str_w(w_res) == "\x04\bi\xFE\xFF\xFE"
w_res = space.execute("return Marshal.dump(-(2 ** 30))")
assert space.str_w(w_res) == "\x04\bi\xFC\x00\x00\x00\xC0"
def test_load_negative_integer(self, space):
w_res = space.execute("return Marshal.load('\x04\bi\xFA')")
assert space.int_w(w_res) == -1
w_res = space.execute("return Marshal.load('\x04\bi\x80')")
assert space.int_w(w_res) == -123
w_res = space.execute("return Marshal.load('\x04\bi\xFF\x84')")
assert space.int_w(w_res) == -124
# w_res = space.execute("return Marshal.load('\x04\bi\xFF\x00')")
w_res = space.execute("return Marshal.load(Marshal.dump(-256))")
assert space.int_w(w_res) == -256
w_res = space.execute("return Marshal.load('\x04\bi\xFE\xFF\xFE')")
assert space.int_w(w_res) == -257
# w_res = space.execute("return Marshal.load('\x04\bi\xFE\x00\x00')")
w_res = space.execute("return Marshal.load(Marshal.dump(-(2 ** 16)))")
assert space.int_w(w_res) == -(2 ** 16)
w_res = space.execute("return Marshal.load('\x04\bi\xFD\xFF\xFF\xFE')")
assert space.int_w(w_res) == -(2 ** 16 + 1)
# w_res = space.execute("return Marshal.load('\x04\bi\xFC\x00\x00\x00')")
w_res = space.execute("return Marshal.load(Marshal.dump(-(2 ** 24)))")
assert space.int_w(w_res) == -(2 ** 24)
w_res = space.execute("return Marshal.load('\x04\bi\xFC\xFF\xFF\xFF\xFE')")
assert space.int_w(w_res) == -(2 ** 24 + 1)
# w_res = space.execute("return Marshal.load('\x04\bi\xFC\x00\x00\x00\xC0')")
w_res = space.execute("return Marshal.load(Marshal.dump(-(2 ** 30)))")
assert space.int_w(w_res) == -(2 ** 30)
def test_dump_float(self, space):
w_res = space.execute("return Marshal.dump(0.0)")
assert space.str_w(w_res) == "\x04\bf\x060"
w_res = space.execute("return Marshal.dump(0.1)")
assert space.str_w(w_res) == "\x04\bf\b0.1"
w_res = space.execute("return Marshal.dump(1.0)")
assert space.str_w(w_res) == "\x04\bf\x061"
w_res = space.execute("return Marshal.dump(1.1)")
assert space.str_w(w_res) == "\x04\bf\b1.1"
w_res = space.execute("return Marshal.dump(1.001)")
assert space.str_w(w_res) == "\x04\bf\n1.001"
# w_res = space.execute("return Marshal.dump(123456789.123456789)")
# assert space.str_w(w_res) == "\x04\bf\x17123456789.12345679"
# w_res = space.execute("return Marshal.dump(-123456789.123456789)")
# assert space.str_w(w_res) == "\x04\bf\x18-123456789.12345679"
# w_res = space.execute("return Marshal.dump(-0.0)")
# assert space.str_w(w_res) == "\x04\bf\a-0"
def test_load_float(self, space):
w_res = space.execute("return Marshal.load('\x04\bf\x060')")
assert space.float_w(w_res) == 0.0
w_res = space.execute("return Marshal.load('\x04\bf\b0.1')")
assert space.float_w(w_res) == 0.1
w_res = space.execute("return Marshal.load('\x04\bf\x061')")
assert space.float_w(w_res) == 1.0
w_res = space.execute("return Marshal.load('\x04\bf\b1.1')")
assert space.float_w(w_res) == 1.1
w_res = space.execute("return Marshal.load('\x04\bf\n1.001')")
assert space.float_w(w_res) == 1.001
# w_res = space.execute("return Marshal.load('\x04\bf\x17123456789.12345679')")
# assert space.float_w(w_res) == 123456789.123456789
# w_res = space.execute("return Marshal.load('\x04\bf\x18-123456789.12345679')")
# assert space.float_w(w_res) == -123456789.123456789
# w_res = space.execute("return Marshal.load('\x04\bf\a-0')")
# assert repr(space.float_w(w_res)) == repr(-0.0)
def test_dump_string(self, space):
w_res = space.execute("return Marshal.dump('')")
assert space.str_w(w_res) == "\x04\bI\"\x00\x06:\x06ET"
w_res = space.execute("return Marshal.dump('abc')")
assert space.str_w(w_res) == "\x04\bI\"\babc\x06:\x06ET"
w_res = space.execute("return Marshal.dump('i am a longer string')")
assert space.str_w(w_res) == "\x04\bI\"\x19i am a longer string\x06:\x06ET"
def test_load_string(self, space):
# w_res = space.execute("return Marshal.load('\x04\bI\"\x00\x06:\x06ET')")
w_res = space.execute("return Marshal.load(Marshal.dump(''))")
assert space.str_w(w_res) == ""
w_res = space.execute("return Marshal.load('\x04\bI\"\babc\x06:\x06ET')")
assert space.str_w(w_res) == "abc"
w_res = space.execute("return Marshal.load('\x04\bI\"\x19i am a longer string\x06:\x06ET')")
assert space.str_w(w_res) == "i am a longer string"
def test_array(self, space):
w_res = space.execute("return Marshal.load(Marshal.dump([1, 2, 3]))")
assert self.unwrap(space, w_res) == [1, 2, 3]
w_res = space.execute("return Marshal.load(Marshal.dump([1, [2, 3], 4]))")
assert self.unwrap(space, w_res) == [1, [2, 3], 4]
w_res = space.execute("return Marshal.load(Marshal.dump([130, [2, 3], 4]))")
assert self.unwrap(space, w_res) == [130, [2, 3], 4]
w_res = space.execute("return Marshal.load(Marshal.dump([-10000, [2, 123456], -9000]))")
assert self.unwrap(space, w_res) == [-10000, [2, 123456], -9000]
w_res = space.execute("return Marshal.load(Marshal.dump([:foo, :bar]))")
assert self.unwrap(space, w_res) == ["foo", "bar"]
w_res = space.execute("return Marshal.load(Marshal.dump(['foo', 'bar']))")
assert self.unwrap(space, w_res) == ["foo", "bar"]
def test_incompatible_format(self, space):
with self.raises(
space,
"TypeError",
"incompatible marshal file format (can't be read)\n"
"format version 4.8 required; 97.115 given"
):
space.execute("Marshal.load('asd')")
def test_short_data(self, space):
with self.raises(space, "ArgumentError", "marshal data too short"):
space.execute("Marshal.load('')")
def test_parameters(self, space):
with self.raises(space, "TypeError", "instance of IO needed"):
space.execute("Marshal.load(4)")
def test_io(self, space, tmpdir):
f = tmpdir.join("testfile")
w_res = space.execute("""
Marshal.dump('hallo', File.new('%s', 'wb'))
file = File.open('%s', 'rb')
return Marshal.load(file.read)
""" % (f, f))
assert space.str_w(w_res) == "hallo"
w_res = space.execute("""
Marshal.dump('hallo', File.new('%s', 'wb'))
file = File.open('%s', 'rb')
return Marshal.load(file)
""" % (f, f))
assert space.str_w(w_res) == "hallo"
| 41.333333 | 113 | 0.599182 | 2,534 | 16,616 | 3.778216 | 0.065114 | 0.098183 | 0.121266 | 0.205557 | 0.909129 | 0.890746 | 0.865991 | 0.817004 | 0.725715 | 0.628473 | 0 | 0.071769 | 0.216779 | 16,616 | 401 | 114 | 41.436409 | 0.6639 | 0.091478 | 0 | 0.166023 | 0 | 0.019305 | 0.327118 | 0.14863 | 0.007722 | 0 | 0 | 0.002494 | 0.409266 | 1 | 0.096525 | false | 0 | 0.003861 | 0 | 0.111969 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
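The byte strings asserted throughout these tests follow Marshal's packed integer encoding: 0 is a single zero byte, small values (1 to 122, -1 to -123) are stored in one offset byte, and anything larger is a signed length byte followed by little-endian payload bytes. A small standalone decoder for the bytes after the `i` tag (written here for illustration; it is not part of the test suite):

def decode_marshal_long(data):
    """Decode Ruby Marshal's packed integer, i.e. the bytes after 'i'."""
    b = data[0] - 256 if data[0] >= 128 else data[0]  # sign-extend first byte
    if b == 0:
        return 0
    if b > 4:            # small positive: stored as value + 5
        return b - 5
    if b < -4:           # small negative: stored as value - 5
        return b + 5
    # Otherwise |b| little-endian payload bytes follow; a negative b means
    # the value is negative, so start from -1 and patch bytes in.
    value = -1 if b < 0 else 0
    for i in range(abs(b)):
        value &= ~(0xFF << (8 * i))
        value |= data[1 + i] << (8 * i)
    return value

assert decode_marshal_long(b"\n") == 5           # Marshal.dump(5)
assert decode_marshal_long(b"\x01{") == 123      # Marshal.dump(123)
assert decode_marshal_long(b"\xfe\xff\xfe") == -257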
50012ba1b16895c39fdabbc3d6baaf5a64542b76 | 14,434 | py | Python | changer/allnumbers.py | azwri/changer | d61c77c4f39e3969a91f6654fe2779c8a14d36ac | [
"MIT"
] | null | null | null | changer/allnumbers.py | azwri/changer | d61c77c4f39e3969a91f6654fe2779c8a14d36ac | [
"MIT"
] | null | null | null | changer/allnumbers.py | azwri/changer | d61c77c4f39e3969a91f6654fe2779c8a14d36ac | [
"MIT"
] | null | null | null | class Numbers:
def __init__(self):
        # english
        self.__english = (
('1', '١'),
('2', '٢'),
('3', '٣'),
('4', '٤'),
('5', '٥'),
('6', '٦'),
('7', '٧'),
('8', '٨'),
('9', '٩'),
('0', '٠'),
)
# arabic
self.__arabic = (
('١', '1'),
('٢', '2'),
('٣', '3'),
('٤', '4'),
('٥', '5'),
('٦', '6'),
('٧', '7'),
('٨', '8'),
('٩', '9'),
('٠', '0'),
)
        # hindi (Devanagari digits assumed; the table must differ from the
        # Arabic one for the hindi_* conversions to do anything)
        self.__hindi = (
            ('१', '1'),
            ('२', '2'),
            ('३', '3'),
            ('४', '4'),
            ('५', '5'),
            ('६', '6'),
            ('७', '7'),
            ('८', '8'),
            ('९', '9'),
            ('०', '0'),
        )
# persian
self.__persian = (
('۱', '1'),
('۲', '2'),
('۳', '3'),
('۴', '4'),
('۵', '5'),
('۶', '6'),
('۷', '7'),
('۸', '8'),
('۹', '9'),
('۰', '0'),
)
        # bengali
self.__bengali = (
('১', '1'),
('২', '2'),
('৩', '3'),
('৪', '4'),
('৫', '5'),
('৬', '6'),
('৭', '7'),
('৮', '8'),
('৯', '9'),
('০', '0'),
)
        # chinese_simple
self.__chinese_simple = (
('一', '1'),
('二', '2'),
('三', '3'),
('四', '4'),
('五', '5'),
('六', '6'),
('七', '7'),
('八', '8'),
('九', '9'),
('〇', '0'),
)
        # chinese_complex
self.__chinese_complex = (
('壹', '1'),
('貳', '2'),
('參', '3'),
('肆', '4'),
('伍', '5'),
('陸', '6'),
('柒', '7'),
('捌', '8'),
('玖', '9'),
('零', '0'),
)
        # malayalam
self.__malayalam = (
('൧', '1'),
('൨', '2'),
('൩', '3'),
('൪', '4'),
('൫', '5'),
('൬', '6'),
('൭', '7'),
('൮', '8'),
('൯', '9'),
('൦', '0'),
)
        # thai
self.__thai = (
('๑', '1'),
('๒', '2'),
('๓', '3'),
('๔', '4'),
('๕', '5'),
('๖', '6'),
('๗', '7'),
('๘', '8'),
('๙', '9'),
('๐', '0'),
)
        # urdu
self.__urdu = (
('۱', '1'),
('۲', '2'),
('۳', '3'),
('۴', '4'),
('۵', '5'),
('۶', '6'),
('۷', '7'),
('۸', '8'),
('۹', '9'),
('۰', '0'),
)
# ##################################################
    def __to_change_1(self, language, numbers):
        # Convert each known digit of `numbers` into the first element of
        # its pair; characters with no matching pair are dropped.
        if isinstance(numbers, int):
            numbers = str(numbers)
        return ''.join(pair[0] for ch in numbers for pair in language if ch in pair)

    def __to_change_2(self, language, numbers):
        # Same walk, but emit the second element of each matching pair.
        if isinstance(numbers, int):
            numbers = str(numbers)
        return ''.join(pair[1] for ch in numbers for pair in language if ch in pair)
# From English
english_to_arabic = lambda self, the_number : self.__to_change_1(self.__arabic, numbers=the_number)
english_to_hindi = lambda self, the_number : self.__to_change_1(self.__hindi, numbers=the_number)
english_to_persian = lambda self, the_number : self.__to_change_1(self.__persian, numbers=the_number)
english_to_bengali = lambda self, the_number : self.__to_change_1(self.__bengali, numbers=the_number)
english_to_chinese_simple = lambda self, the_number : self.__to_change_1(self.__chinese_simple, numbers=the_number)
english_to_chinese_complex = lambda self, the_number : self.__to_change_1(self.__chinese_complex, numbers=the_number)
english_to_malayalam = lambda self, the_number : self.__to_change_1(self.__malayalam, numbers=the_number)
english_to_thai = lambda self, the_number : self.__to_change_1(self.__thai, numbers=the_number)
english_to_urdu = lambda self, the_number : self.__to_change_1(self.__urdu, numbers=the_number)
# From Arabic
arabic_to_english = lambda self, the_number : self.__to_change_2(self.__arabic, numbers=the_number)
arabic_to_hindi = lambda self, the_number : self.english_to_hindi(self.arabic_to_english(the_number))
arabic_to_persian = lambda self, the_number : self.english_to_persian(self.arabic_to_english(the_number))
arabic_to_bengali = lambda self, the_number : self.english_to_bengali(self.arabic_to_english(the_number))
arabic_to_chinese_simple = lambda self, the_number : self.english_to_chinese_simple(self.arabic_to_english(the_number))
arabic_to_chinese_complex = lambda self, the_number : self.english_to_chinese_complex(self.arabic_to_english(the_number))
arabic_to_malayalam = lambda self, the_number : self.english_to_malayalam(self.arabic_to_english(the_number))
arabic_to_thai = lambda self, the_number : self.english_to_thai(self.arabic_to_english(the_number))
arabic_to_urdu = lambda self, the_number : self.english_to_urdu(self.arabic_to_english(the_number))
# From Hindi
hindi_to_english = lambda self, the_number : self.__to_change_2(self.__hindi, numbers=the_number)
    hindi_to_arabic = lambda self, the_number : self.english_to_arabic(self.hindi_to_english(the_number))
hindi_to_persian = lambda self, the_number : self.english_to_persian(self.hindi_to_english(the_number))
hindi_to_bengali = lambda self, the_number : self.english_to_bengali(self.hindi_to_english(the_number))
hindi_to_chinese_simple = lambda self, the_number : self.english_to_chinese_simple(self.hindi_to_english(the_number))
hindi_to_chinese_complex = lambda self, the_number : self.english_to_chinese_complex(self.hindi_to_english(the_number))
hindi_to_malayalam = lambda self, the_number : self.english_to_malayalam(self.hindi_to_english(the_number))
hindi_to_thai = lambda self, the_number : self.english_to_thai(self.hindi_to_english(the_number))
hindi_to_urdu = lambda self, the_number : self.english_to_urdu(self.hindi_to_english(the_number))
# From Persian
persian_to_english = lambda self, the_number : self.__to_change_2(self.__persian, numbers=the_number)
    persian_to_arabic = lambda self, the_number : self.english_to_arabic(self.persian_to_english(the_number))
persian_to_hindi = lambda self, the_number : self.english_to_hindi(self.persian_to_english(the_number))
persian_to_bengali = lambda self, the_number : self.english_to_bengali(self.persian_to_english(the_number))
persian_to_chinese_simple = lambda self, the_number : self.english_to_chinese_simple(self.persian_to_english(the_number))
persian_to_chinese_complex = lambda self, the_number : self.english_to_chinese_complex(self.persian_to_english(the_number))
persian_to_malayalam = lambda self, the_number : self.english_to_malayalam(self.persian_to_english(the_number))
persian_to_thai = lambda self, the_number : self.english_to_thai(self.persian_to_english(the_number))
persian_to_urdu = lambda self, the_number : self.english_to_urdu(self.persian_to_english(the_number))
# From Bengali
bengali_to_english = lambda self, the_number : self.__to_change_2(self.__bengali, numbers=the_number)
    bengali_to_arabic = lambda self, the_number : self.english_to_arabic(self.bengali_to_english(the_number))
bengali_to_hindi = lambda self, the_number : self.english_to_hindi(self.bengali_to_english(the_number))
bengali_to_persian = lambda self, the_number : self.english_to_persian(self.bengali_to_english(the_number))
bengali_to_chinese_simple = lambda self, the_number : self.english_to_chinese_simple(self.bengali_to_english(the_number))
bengali_to_chinese_complex = lambda self, the_number : self.english_to_chinese_complex(self.bengali_to_english(the_number))
bengali_to_malayalam = lambda self, the_number : self.english_to_malayalam(self.bengali_to_english(the_number))
bengali_to_thai = lambda self, the_number : self.english_to_thai(self.bengali_to_english(the_number))
bengali_to_urdu = lambda self, the_number : self.english_to_urdu(self.bengali_to_english(the_number))
# From Chinese Simple
chinese_simple_to_english = lambda self, the_number : self.__to_change_2(self.__chinese_simple, numbers=the_number)
    chinese_simple_to_arabic = lambda self, the_number : self.english_to_arabic(self.chinese_simple_to_english(the_number))
chinese_simple_to_hindi = lambda self, the_number : self.english_to_hindi(self.chinese_simple_to_english(the_number))
chinese_simple_to_persian = lambda self, the_number : self.english_to_persian(self.chinese_simple_to_english(the_number))
chinese_simple_to_bengali = lambda self, the_number : self.english_to_bengali(self.chinese_simple_to_english(the_number))
chinese_simple_to_chinese_complex = lambda self, the_number : self.english_to_chinese_complex(self.chinese_simple_to_english(the_number))
chinese_simple_to_malayalam = lambda self, the_number : self.english_to_malayalam(self.chinese_simple_to_english(the_number))
chinese_simple_to_thai = lambda self, the_number : self.english_to_thai(self.chinese_simple_to_english(the_number))
chinese_simple_to_urdu = lambda self, the_number : self.english_to_urdu(self.chinese_simple_to_english(the_number))
# From Chinese Complex
chinese_complex_to_english = lambda self, the_number : self.__to_change_2(self.__chinese_complex, numbers=the_number)
    chinese_complex_to_arabic = lambda self, the_number : self.english_to_arabic(self.chinese_complex_to_english(the_number))
chinese_complex_to_hindi = lambda self, the_number : self.english_to_hindi(self.chinese_complex_to_english(the_number))
chinese_complex_to_persian = lambda self, the_number : self.english_to_persian(self.chinese_complex_to_english(the_number))
chinese_complex_to_bengali = lambda self, the_number : self.english_to_bengali(self.chinese_complex_to_english(the_number))
chinese_complex_to_chinese_simple = lambda self, the_number : self.english_to_chinese_simple(self.chinese_complex_to_english(the_number))
chinese_complex_to_malayalam = lambda self, the_number : self.english_to_malayalam(self.chinese_complex_to_english(the_number))
chinese_complex_to_thai = lambda self, the_number : self.english_to_thai(self.chinese_complex_to_english(the_number))
chinese_complex_to_urdu = lambda self, the_number : self.english_to_urdu(self.chinese_complex_to_english(the_number))
# From Malayalam
malayalam_to_english = lambda self, the_number : self.__to_change_2(self.__malayalam, numbers=the_number)
    malayalam_to_arabic = lambda self, the_number : self.english_to_arabic(self.malayalam_to_english(the_number))
malayalam_to_hindi = lambda self, the_number : self.english_to_hindi(self.malayalam_to_english(the_number))
malayalam_to_persian = lambda self, the_number : self.english_to_persian(self.malayalam_to_english(the_number))
malayalam_to_bengali = lambda self, the_number : self.english_to_bengali(self.malayalam_to_english(the_number))
malayalam_to_chinese_simple = lambda self, the_number : self.english_to_chinese_simple(self.malayalam_to_english(the_number))
malayalam_to_chinese_complex = lambda self, the_number : self.english_to_chinese_complex(self.malayalam_to_english(the_number))
malayalam_to_thai = lambda self, the_number : self.english_to_thai(self.malayalam_to_english(the_number))
malayalam_to_urdu = lambda self, the_number : self.english_to_urdu(self.malayalam_to_english(the_number))
# From Thai
thai_to_english = lambda self, the_number : self.__to_change_2(self.__thai, numbers=the_number)
    thai_to_arabic = lambda self, the_number : self.english_to_arabic(self.thai_to_english(the_number))
thai_to_hindi = lambda self, the_number : self.english_to_hindi(self.thai_to_english(the_number))
thai_to_persian = lambda self, the_number : self.english_to_persian(self.thai_to_english(the_number))
thai_to_bengali = lambda self, the_number : self.english_to_bengali(self.thai_to_english(the_number))
thai_to_chinese_simple = lambda self, the_number : self.english_to_chinese_simple(self.thai_to_english(the_number))
thai_to_chinese_complex = lambda self, the_number : self.english_to_chinese_complex(self.thai_to_english(the_number))
thai_to_malayalam = lambda self, the_number : self.english_to_malayalam(self.thai_to_english(the_number))
thai_to_urdu = lambda self, the_number : self.english_to_urdu(self.thai_to_english(the_number))
# From Urdu
urdu_to_english = lambda self, the_number : self.__to_change_2(self.__urdu, numbers=the_number)
    urdu_to_arabic = lambda self, the_number : self.english_to_arabic(self.urdu_to_english(the_number))
urdu_to_hindi = lambda self, the_number : self.english_to_hindi(self.urdu_to_english(the_number))
urdu_to_persian = lambda self, the_number : self.english_to_persian(self.urdu_to_english(the_number))
urdu_to_bengali = lambda self, the_number : self.english_to_bengali(self.urdu_to_english(the_number))
urdu_to_chinese_simple = lambda self, the_number : self.english_to_chinese_simple(self.urdu_to_english(the_number))
urdu_to_chinese_complex = lambda self, the_number : self.english_to_chinese_complex(self.urdu_to_english(the_number))
urdu_to_thai = lambda self, the_number : self.english_to_thai(self.urdu_to_english(the_number))
urdu_to_malayalam = lambda self, the_number : self.english_to_malayalam(self.urdu_to_english(the_number))
# ##################################################
# def arabic_to_hindi(self, n):
# n = self.arabic_to_english(n)
# return self.english_to_hindi(n)
# def english_to_arabic(self, language):
# return self.to_change(self.__arabic)
# def english_to_arabic(self, numbers):
# num = []
# if isinstance(numbers, int):
# numbers = str(numbers)
# for i in enumerate(numbers):
# for n in self.__arabic:
# if i[1] in n:
# num.append(n[1])
# return ''.join(map(str, num))
# ################################################## | 48.599327 | 141 | 0.665581 | 1,990 | 14,434 | 4.379397 | 0.059296 | 0.185886 | 0.134251 | 0.196213 | 0.923351 | 0.871945 | 0.836374 | 0.824326 | 0.700287 | 0.623178 | 0 | 0.017802 | 0.198282 | 14,434 | 297 | 142 | 48.599327 | 0.735223 | 0.047527 | 0 | 0.234783 | 0 | 0 | 0.014749 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.013043 | false | 0 | 0 | 0 | 0.417391 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
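A short usage sketch of the converter class above (the instance name and sample values are illustrative):

numbers = Numbers()

print(numbers.english_to_arabic(2024))     # ٢٠٢٤
print(numbers.english_to_persian("2024"))  # ۲۰۲۴
print(numbers.arabic_to_english("٢٠٢٤"))   # 2024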
5025962b04344f15be4bf9a9a76636e8fbf67782 | 178 | py | Python | npbench/benchmarks/polybench/gesummv/gesummv_pythran.py | frahlg/npbench | 1bc4d9e2e22f3ca67fa2bc7f40e2e751a9c8dd26 | [
"BSD-3-Clause"
] | 27 | 2021-05-10T11:49:13.000Z | 2022-03-22T18:07:19.000Z | npbench/benchmarks/polybench/gesummv/gesummv_pythran.py | frahlg/npbench | 1bc4d9e2e22f3ca67fa2bc7f40e2e751a9c8dd26 | [
"BSD-3-Clause"
] | 3 | 2021-12-01T13:03:17.000Z | 2022-03-17T10:53:00.000Z | npbench/benchmarks/polybench/gesummv/gesummv_pythran.py | frahlg/npbench | 1bc4d9e2e22f3ca67fa2bc7f40e2e751a9c8dd26 | [
"BSD-3-Clause"
] | 7 | 2021-06-24T03:40:25.000Z | 2022-01-26T09:04:33.000Z | import numpy as np
# pythran export kernel(float64, float64, float64[:,:], float64[:,:], float64[:])
def kernel(alpha, beta, A, B, x):
return alpha * A @ x + beta * B @ x
| 22.25 | 81 | 0.61236 | 26 | 178 | 4.192308 | 0.576923 | 0.513761 | 0.577982 | 0.513761 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.070922 | 0.207865 | 178 | 7 | 82 | 25.428571 | 0.702128 | 0.44382 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 9 |
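The kernel above is PolyBench's GESUMMV operation, y = alpha*A*x + beta*B*x. A quick self-contained check against the naive loop definition (sizes and coefficients arbitrary):

import numpy as np

N = 4
rng = np.random.default_rng(0)
A, B = rng.random((N, N)), rng.random((N, N))
x = rng.random(N)
alpha, beta = 1.5, 1.2

ref = np.zeros(N)
for i in range(N):          # naive definition of y = alpha*A@x + beta*B@x
    for j in range(N):
        ref[i] += alpha * A[i, j] * x[j] + beta * B[i, j] * x[j]

assert np.allclose(kernel(alpha, beta, A, B, x), ref)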
503c51725f1899ab03e1b173b629caacbbcb9e83 | 1,495 | py | Python | wrappers/python/tests/did/test_replace_keys_apply.py | nveskovic/indy-sdk | 24a8b49165969cdb016c679b343eb169d004fd1d | [
"Apache-2.0"
] | 5 | 2018-04-09T12:26:28.000Z | 2019-06-12T01:45:30.000Z | wrappers/python/tests/did/test_replace_keys_apply.py | nveskovic/indy-sdk | 24a8b49165969cdb016c679b343eb169d004fd1d | [
"Apache-2.0"
] | 9 | 2019-01-22T22:31:54.000Z | 2019-04-11T21:45:09.000Z | wrappers/python/tests/did/test_replace_keys_apply.py | nveskovic/indy-sdk | 24a8b49165969cdb016c679b343eb169d004fd1d | [
"Apache-2.0"
] | 19 | 2018-04-25T16:08:43.000Z | 2022-01-11T10:18:38.000Z | from indy import IndyError
from indy import did
from indy.error import ErrorCode
import pytest
@pytest.mark.asyncio
async def test_replace_keys_apply_works(wallet_handle):
(_did, _) = await did.create_and_store_my_did(wallet_handle, "{}")
await did.replace_keys_start(wallet_handle, _did, "{}")
await did.replace_keys_apply(wallet_handle, _did)
@pytest.mark.asyncio
async def test_replace_keys_apply_works_without_calling_replace_start(wallet_handle):
(_did, _) = await did.create_and_store_my_did(wallet_handle, "{}")
with pytest.raises(IndyError) as e:
await did.replace_keys_apply(wallet_handle, _did)
assert ErrorCode.WalletItemNotFound == e.value.error_code
@pytest.mark.asyncio
async def test_replace_keys_apply_works_for_unknown_did(wallet_handle, did_my1):
(_did, _) = await did.create_and_store_my_did(wallet_handle, "{}")
await did.replace_keys_start(wallet_handle, _did, "{}")
with pytest.raises(IndyError) as e:
await did.replace_keys_apply(wallet_handle, did_my1)
assert ErrorCode.WalletItemNotFound == e.value.error_code
@pytest.mark.asyncio
async def test_replace_keys_apply_works_invalid_wallet_handle(wallet_handle):
(_did, _) = await did.create_and_store_my_did(wallet_handle, "{}")
await did.replace_keys_start(wallet_handle, _did, "{}")
with pytest.raises(IndyError) as e:
await did.replace_keys_apply(wallet_handle + 1, _did)
assert ErrorCode.WalletInvalidHandle == e.value.error_code
| 38.333333 | 85 | 0.773244 | 212 | 1,495 | 5.028302 | 0.193396 | 0.180113 | 0.140713 | 0.124765 | 0.805816 | 0.805816 | 0.801126 | 0.801126 | 0.76454 | 0.76454 | 0 | 0.002306 | 0.129766 | 1,495 | 38 | 86 | 39.342105 | 0.817064 | 0 | 0 | 0.62069 | 0 | 0 | 0.009365 | 0 | 0 | 0 | 0 | 0 | 0.103448 | 1 | 0 | true | 0 | 0.137931 | 0 | 0.137931 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
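The tests above exercise indy's two-step key rotation: `replace_keys_start` prepares a new verkey (and returns it) while the old key stays active, and `replace_keys_apply` commits the swap. A happy-path sketch using the same calls (wallet setup elided, as in the fixtures above):

# Sketch only: wallet_handle would come from the usual indy wallet fixtures.
(_did, old_verkey) = await did.create_and_store_my_did(wallet_handle, "{}")
new_verkey = await did.replace_keys_start(wallet_handle, _did, "{}")
await did.replace_keys_apply(wallet_handle, _did)
assert new_verkey != old_verkey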